push: use bundle2 to push obsmarkers when possible
Pierre-Yves David
r22347:7198cb9b default
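This changeset registers a new bundle2 part generator (_pushb2obsmarkers) so obsolescence markers ride along in the same bundle2 push as changesets, phases and bookmarks, instead of always falling back to pushkey. For orientation only, here is a minimal, self-contained Python sketch of the ordered-registration pattern the diff below relies on; all names here (partsgenerator, pushbundle, the plain-dict push operation) are hypothetical stand-ins, not Mercurial's actual API.

# partgenorder / partgenmapping mirror the ordered-registration idea:
# generators run in the order they were registered, and extensions can
# wrap an entry in the mapping without touching the order.
partgenorder = []
partgenmapping = {}

def partsgenerator(stepname):
    """register a part generator under ``stepname`` (hypothetical helper)"""
    def dec(func):
        assert stepname not in partgenmapping
        partgenmapping[stepname] = func
        partgenorder.append(stepname)
        return func
    return dec

@partsgenerator('changeset')
def genchangeset(pushop, bundler):
    # always emit the changegroup part in this toy model
    bundler.append(('CHANGEGROUP', list(pushop['outgoing'])))

@partsgenerator('obsmarkers')
def genobsmarkers(pushop, bundler):
    # like the new generator in the diff: emit a part only when there
    # are outgoing markers (the real code also checks remote support)
    if pushop['outobsmarkers']:
        bundler.append(('OBSMARKERS', list(pushop['outobsmarkers'])))

def pushbundle(pushop):
    """run every registered generator, in order, against one bundler"""
    bundler = []
    for stepname in partgenorder:
        partgenmapping[stepname](pushop, bundler)
    return bundler

if __name__ == '__main__':
    op = {'outgoing': ['rev1', 'rev2'], 'outobsmarkers': ['marker1']}
    print(pushbundle(op))  # [('CHANGEGROUP', [...]), ('OBSMARKERS', [...])]

In the real code the equivalent pieces are the b2partsgenerator decorator, the b2partsgenorder list and b2partsgenmapping dict, and _pushbundle2, and the obsmarkers generator additionally checks the remote's supported obsmarker versions before emitting the part.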
@@ -1,1045 +1,1056 @@
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
12 import discovery, phases, obsolete, bookmarks, bundle2, pushkey
13
13
14 def readbundle(ui, fh, fname, vfs=None):
14 def readbundle(ui, fh, fname, vfs=None):
15 header = changegroup.readexactly(fh, 4)
15 header = changegroup.readexactly(fh, 4)
16
16
17 alg = None
17 alg = None
18 if not fname:
18 if not fname:
19 fname = "stream"
19 fname = "stream"
20 if not header.startswith('HG') and header.startswith('\0'):
20 if not header.startswith('HG') and header.startswith('\0'):
21 fh = changegroup.headerlessfixup(fh, header)
21 fh = changegroup.headerlessfixup(fh, header)
22 header = "HG10"
22 header = "HG10"
23 alg = 'UN'
23 alg = 'UN'
24 elif vfs:
24 elif vfs:
25 fname = vfs.join(fname)
25 fname = vfs.join(fname)
26
26
27 magic, version = header[0:2], header[2:4]
27 magic, version = header[0:2], header[2:4]
28
28
29 if magic != 'HG':
29 if magic != 'HG':
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 if version == '10':
31 if version == '10':
32 if alg is None:
32 if alg is None:
33 alg = changegroup.readexactly(fh, 2)
33 alg = changegroup.readexactly(fh, 2)
34 return changegroup.unbundle10(fh, alg)
34 return changegroup.unbundle10(fh, alg)
35 elif version == '2X':
35 elif version == '2X':
36 return bundle2.unbundle20(ui, fh, header=magic + version)
36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 else:
37 else:
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40 def buildobsmarkerspart(bundler, markers):
40 def buildobsmarkerspart(bundler, markers):
41 """add an obsmarker part to the bundler with <markers>
41 """add an obsmarker part to the bundler with <markers>
42
42
43 No part is created if markers is empty.
43 No part is created if markers is empty.
44 Raises ValueError if the bundler doesn't support any known obsmarker format.
44 Raises ValueError if the bundler doesn't support any known obsmarker format.
45 """
45 """
46 if markers:
46 if markers:
47 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
47 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
48 version = obsolete.commonversion(remoteversions)
48 version = obsolete.commonversion(remoteversions)
49 if version is None:
49 if version is None:
50 raise ValueError('bundler does not support common obsmarker format')
50 raise ValueError('bundler does not support common obsmarker format')
51 stream = obsolete.encodemarkers(markers, True, version=version)
51 stream = obsolete.encodemarkers(markers, True, version=version)
52 return bundler.newpart('B2X:OBSMARKERS', data=stream)
52 return bundler.newpart('B2X:OBSMARKERS', data=stream)
53 return None
53 return None
54
54
55 class pushoperation(object):
55 class pushoperation(object):
56 """A object that represent a single push operation
56 """A object that represent a single push operation
57
57
58 It purpose is to carry push related state and very common operation.
58 It purpose is to carry push related state and very common operation.
59
59
60 A new should be created at the beginning of each push and discarded
60 A new should be created at the beginning of each push and discarded
61 afterward.
61 afterward.
62 """
62 """
63
63
64 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
64 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
65 # repo we push from
65 # repo we push from
66 self.repo = repo
66 self.repo = repo
67 self.ui = repo.ui
67 self.ui = repo.ui
68 # repo we push to
68 # repo we push to
69 self.remote = remote
69 self.remote = remote
70 # force option provided
70 # force option provided
71 self.force = force
71 self.force = force
72 # revs to be pushed (None is "all")
72 # revs to be pushed (None is "all")
73 self.revs = revs
73 self.revs = revs
74 # allow push of new branch
74 # allow push of new branch
75 self.newbranch = newbranch
75 self.newbranch = newbranch
76 # did a local lock get acquired?
76 # did a local lock get acquired?
77 self.locallocked = None
77 self.locallocked = None
78 # steps already performed
78 # steps already performed
79 # (used to check what steps have been already performed through bundle2)
79 # (used to check what steps have been already performed through bundle2)
80 self.stepsdone = set()
80 self.stepsdone = set()
81 # Integer version of the push result
81 # Integer version of the push result
82 # - None means nothing to push
82 # - None means nothing to push
83 # - 0 means HTTP error
83 # - 0 means HTTP error
84 # - 1 means we pushed and remote head count is unchanged *or*
84 # - 1 means we pushed and remote head count is unchanged *or*
85 # we have outgoing changesets but refused to push
85 # we have outgoing changesets but refused to push
86 # - other values as described by addchangegroup()
86 # - other values as described by addchangegroup()
87 self.ret = None
87 self.ret = None
88 # discover.outgoing object (contains common and outgoing data)
88 # discover.outgoing object (contains common and outgoing data)
89 self.outgoing = None
89 self.outgoing = None
90 # all remote heads before the push
90 # all remote heads before the push
91 self.remoteheads = None
91 self.remoteheads = None
92 # testable as a boolean indicating if any nodes are missing locally.
92 # testable as a boolean indicating if any nodes are missing locally.
93 self.incoming = None
93 self.incoming = None
94 # phase changes that must be pushed alongside the changesets
94 # phase changes that must be pushed alongside the changesets
95 self.outdatedphases = None
95 self.outdatedphases = None
96 # phase changes that must be pushed if changeset push fails
96 # phase changes that must be pushed if changeset push fails
97 self.fallbackoutdatedphases = None
97 self.fallbackoutdatedphases = None
98 # outgoing obsmarkers
98 # outgoing obsmarkers
99 self.outobsmarkers = set()
99 self.outobsmarkers = set()
100 # outgoing bookmarks
100 # outgoing bookmarks
101 self.outbookmarks = []
101 self.outbookmarks = []
102
102
103 @util.propertycache
103 @util.propertycache
104 def futureheads(self):
104 def futureheads(self):
105 """future remote heads if the changeset push succeeds"""
105 """future remote heads if the changeset push succeeds"""
106 return self.outgoing.missingheads
106 return self.outgoing.missingheads
107
107
108 @util.propertycache
108 @util.propertycache
109 def fallbackheads(self):
109 def fallbackheads(self):
110 """future remote heads if the changeset push fails"""
110 """future remote heads if the changeset push fails"""
111 if self.revs is None:
111 if self.revs is None:
112 # no target to push, all common heads are relevant
112 # no target to push, all common heads are relevant
113 return self.outgoing.commonheads
113 return self.outgoing.commonheads
114 unfi = self.repo.unfiltered()
114 unfi = self.repo.unfiltered()
115 # I want cheads = heads(::missingheads and ::commonheads)
115 # I want cheads = heads(::missingheads and ::commonheads)
116 # (missingheads is revs with secret changeset filtered out)
116 # (missingheads is revs with secret changeset filtered out)
117 #
117 #
118 # This can be expressed as:
118 # This can be expressed as:
119 # cheads = ( (missingheads and ::commonheads)
119 # cheads = ( (missingheads and ::commonheads)
120 # + (commonheads and ::missingheads))"
120 # + (commonheads and ::missingheads))"
121 # )
121 # )
122 #
122 #
123 # while trying to push we already computed the following:
123 # while trying to push we already computed the following:
124 # common = (::commonheads)
124 # common = (::commonheads)
125 # missing = ((commonheads::missingheads) - commonheads)
125 # missing = ((commonheads::missingheads) - commonheads)
126 #
126 #
127 # We can pick:
127 # We can pick:
128 # * missingheads part of common (::commonheads)
128 # * missingheads part of common (::commonheads)
129 common = set(self.outgoing.common)
129 common = set(self.outgoing.common)
130 nm = self.repo.changelog.nodemap
130 nm = self.repo.changelog.nodemap
131 cheads = [node for node in self.revs if nm[node] in common]
131 cheads = [node for node in self.revs if nm[node] in common]
132 # and
132 # and
133 # * commonheads parents on missing
133 # * commonheads parents on missing
134 revset = unfi.set('%ln and parents(roots(%ln))',
134 revset = unfi.set('%ln and parents(roots(%ln))',
135 self.outgoing.commonheads,
135 self.outgoing.commonheads,
136 self.outgoing.missing)
136 self.outgoing.missing)
137 cheads.extend(c.node() for c in revset)
137 cheads.extend(c.node() for c in revset)
138 return cheads
138 return cheads
139
139
140 @property
140 @property
141 def commonheads(self):
141 def commonheads(self):
142 """set of all common heads after changeset bundle push"""
142 """set of all common heads after changeset bundle push"""
143 if self.ret:
143 if self.ret:
144 return self.futureheads
144 return self.futureheads
145 else:
145 else:
146 return self.fallbackheads
146 return self.fallbackheads
147
147
148 def push(repo, remote, force=False, revs=None, newbranch=False):
148 def push(repo, remote, force=False, revs=None, newbranch=False):
149 '''Push outgoing changesets (limited by revs) from a local
149 '''Push outgoing changesets (limited by revs) from a local
150 repository to remote. Return an integer:
150 repository to remote. Return an integer:
151 - None means nothing to push
151 - None means nothing to push
152 - 0 means HTTP error
152 - 0 means HTTP error
153 - 1 means we pushed and remote head count is unchanged *or*
153 - 1 means we pushed and remote head count is unchanged *or*
154 we have outgoing changesets but refused to push
154 we have outgoing changesets but refused to push
155 - other values as described by addchangegroup()
155 - other values as described by addchangegroup()
156 '''
156 '''
157 pushop = pushoperation(repo, remote, force, revs, newbranch)
157 pushop = pushoperation(repo, remote, force, revs, newbranch)
158 if pushop.remote.local():
158 if pushop.remote.local():
159 missing = (set(pushop.repo.requirements)
159 missing = (set(pushop.repo.requirements)
160 - pushop.remote.local().supported)
160 - pushop.remote.local().supported)
161 if missing:
161 if missing:
162 msg = _("required features are not"
162 msg = _("required features are not"
163 " supported in the destination:"
163 " supported in the destination:"
164 " %s") % (', '.join(sorted(missing)))
164 " %s") % (', '.join(sorted(missing)))
165 raise util.Abort(msg)
165 raise util.Abort(msg)
166
166
167 # there are two ways to push to remote repo:
167 # there are two ways to push to remote repo:
168 #
168 #
169 # addchangegroup assumes local user can lock remote
169 # addchangegroup assumes local user can lock remote
170 # repo (local filesystem, old ssh servers).
170 # repo (local filesystem, old ssh servers).
171 #
171 #
172 # unbundle assumes local user cannot lock remote repo (new ssh
172 # unbundle assumes local user cannot lock remote repo (new ssh
173 # servers, http servers).
173 # servers, http servers).
174
174
175 if not pushop.remote.canpush():
175 if not pushop.remote.canpush():
176 raise util.Abort(_("destination does not support push"))
176 raise util.Abort(_("destination does not support push"))
177 # get local lock as we might write phase data
177 # get local lock as we might write phase data
178 locallock = None
178 locallock = None
179 try:
179 try:
180 locallock = pushop.repo.lock()
180 locallock = pushop.repo.lock()
181 pushop.locallocked = True
181 pushop.locallocked = True
182 except IOError, err:
182 except IOError, err:
183 pushop.locallocked = False
183 pushop.locallocked = False
184 if err.errno != errno.EACCES:
184 if err.errno != errno.EACCES:
185 raise
185 raise
186 # source repo cannot be locked.
186 # source repo cannot be locked.
187 # We do not abort the push, but just disable the local phase
187 # We do not abort the push, but just disable the local phase
188 # synchronisation.
188 # synchronisation.
189 msg = 'cannot lock source repository: %s\n' % err
189 msg = 'cannot lock source repository: %s\n' % err
190 pushop.ui.debug(msg)
190 pushop.ui.debug(msg)
191 try:
191 try:
192 pushop.repo.checkpush(pushop)
192 pushop.repo.checkpush(pushop)
193 lock = None
193 lock = None
194 unbundle = pushop.remote.capable('unbundle')
194 unbundle = pushop.remote.capable('unbundle')
195 if not unbundle:
195 if not unbundle:
196 lock = pushop.remote.lock()
196 lock = pushop.remote.lock()
197 try:
197 try:
198 _pushdiscovery(pushop)
198 _pushdiscovery(pushop)
199 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
199 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
200 False)
200 False)
201 and pushop.remote.capable('bundle2-exp')):
201 and pushop.remote.capable('bundle2-exp')):
202 _pushbundle2(pushop)
202 _pushbundle2(pushop)
203 _pushchangeset(pushop)
203 _pushchangeset(pushop)
204 _pushsyncphase(pushop)
204 _pushsyncphase(pushop)
205 _pushobsolete(pushop)
205 _pushobsolete(pushop)
206 _pushbookmark(pushop)
206 _pushbookmark(pushop)
207 finally:
207 finally:
208 if lock is not None:
208 if lock is not None:
209 lock.release()
209 lock.release()
210 finally:
210 finally:
211 if locallock is not None:
211 if locallock is not None:
212 locallock.release()
212 locallock.release()
213
213
214 return pushop.ret
214 return pushop.ret
215
215
216 # list of steps to perform discovery before push
216 # list of steps to perform discovery before push
217 pushdiscoveryorder = []
217 pushdiscoveryorder = []
218
218
219 # Mapping between step name and function
219 # Mapping between step name and function
220 #
220 #
221 # This exists to help extensions wrap steps if necessary
221 # This exists to help extensions wrap steps if necessary
222 pushdiscoverymapping = {}
222 pushdiscoverymapping = {}
223
223
224 def pushdiscovery(stepname):
224 def pushdiscovery(stepname):
225 """decorator for function performing discovery before push
225 """decorator for function performing discovery before push
226
226
227 The function is added to the step -> function mapping and appended to the
227 The function is added to the step -> function mapping and appended to the
228 list of steps. Beware that decorated function will be added in order (this
228 list of steps. Beware that decorated function will be added in order (this
229 may matter).
229 may matter).
230
230
231 You can only use this decorator for a new step, if you want to wrap a step
231 You can only use this decorator for a new step, if you want to wrap a step
232 from an extension, change the pushdiscovery dictionary directly."""
232 from an extension, change the pushdiscovery dictionary directly."""
233 def dec(func):
233 def dec(func):
234 assert stepname not in pushdiscoverymapping
234 assert stepname not in pushdiscoverymapping
235 pushdiscoverymapping[stepname] = func
235 pushdiscoverymapping[stepname] = func
236 pushdiscoveryorder.append(stepname)
236 pushdiscoveryorder.append(stepname)
237 return func
237 return func
238 return dec
238 return dec
239
239
240 def _pushdiscovery(pushop):
240 def _pushdiscovery(pushop):
241 """Run all discovery steps"""
241 """Run all discovery steps"""
242 for stepname in pushdiscoveryorder:
242 for stepname in pushdiscoveryorder:
243 step = pushdiscoverymapping[stepname]
243 step = pushdiscoverymapping[stepname]
244 step(pushop)
244 step(pushop)
245
245
246 @pushdiscovery('changeset')
246 @pushdiscovery('changeset')
247 def _pushdiscoverychangeset(pushop):
247 def _pushdiscoverychangeset(pushop):
248 """discover the changeset that need to be pushed"""
248 """discover the changeset that need to be pushed"""
249 unfi = pushop.repo.unfiltered()
249 unfi = pushop.repo.unfiltered()
250 fci = discovery.findcommonincoming
250 fci = discovery.findcommonincoming
251 commoninc = fci(unfi, pushop.remote, force=pushop.force)
251 commoninc = fci(unfi, pushop.remote, force=pushop.force)
252 common, inc, remoteheads = commoninc
252 common, inc, remoteheads = commoninc
253 fco = discovery.findcommonoutgoing
253 fco = discovery.findcommonoutgoing
254 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
254 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
255 commoninc=commoninc, force=pushop.force)
255 commoninc=commoninc, force=pushop.force)
256 pushop.outgoing = outgoing
256 pushop.outgoing = outgoing
257 pushop.remoteheads = remoteheads
257 pushop.remoteheads = remoteheads
258 pushop.incoming = inc
258 pushop.incoming = inc
259
259
260 @pushdiscovery('phase')
260 @pushdiscovery('phase')
261 def _pushdiscoveryphase(pushop):
261 def _pushdiscoveryphase(pushop):
262 """discover the phase that needs to be pushed
262 """discover the phase that needs to be pushed
263
263
264 (computed for both success and failure case for changesets push)"""
264 (computed for both success and failure case for changesets push)"""
265 outgoing = pushop.outgoing
265 outgoing = pushop.outgoing
266 unfi = pushop.repo.unfiltered()
266 unfi = pushop.repo.unfiltered()
267 remotephases = pushop.remote.listkeys('phases')
267 remotephases = pushop.remote.listkeys('phases')
268 publishing = remotephases.get('publishing', False)
268 publishing = remotephases.get('publishing', False)
269 ana = phases.analyzeremotephases(pushop.repo,
269 ana = phases.analyzeremotephases(pushop.repo,
270 pushop.fallbackheads,
270 pushop.fallbackheads,
271 remotephases)
271 remotephases)
272 pheads, droots = ana
272 pheads, droots = ana
273 extracond = ''
273 extracond = ''
274 if not publishing:
274 if not publishing:
275 extracond = ' and public()'
275 extracond = ' and public()'
276 revset = 'heads((%%ln::%%ln) %s)' % extracond
276 revset = 'heads((%%ln::%%ln) %s)' % extracond
277 # Get the list of all revs draft on remote but public here.
277 # Get the list of all revs draft on remote but public here.
278 # XXX Beware that the revset breaks if droots is not strictly
278 # XXX Beware that the revset breaks if droots is not strictly
279 # XXX roots; we may want to ensure it is, but that is costly
279 # XXX roots; we may want to ensure it is, but that is costly
280 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
280 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
281 if not outgoing.missing:
281 if not outgoing.missing:
282 future = fallback
282 future = fallback
283 else:
283 else:
284 # add changesets we are going to push as draft
284 # add changesets we are going to push as draft
285 #
285 #
286 # should not be necessary for a publishing server, but because of an
286 # should not be necessary for a publishing server, but because of an
287 # issue fixed in xxxxx we have to do it anyway.
287 # issue fixed in xxxxx we have to do it anyway.
288 fdroots = list(unfi.set('roots(%ln + %ln::)',
288 fdroots = list(unfi.set('roots(%ln + %ln::)',
289 outgoing.missing, droots))
289 outgoing.missing, droots))
290 fdroots = [f.node() for f in fdroots]
290 fdroots = [f.node() for f in fdroots]
291 future = list(unfi.set(revset, fdroots, pushop.futureheads))
291 future = list(unfi.set(revset, fdroots, pushop.futureheads))
292 pushop.outdatedphases = future
292 pushop.outdatedphases = future
293 pushop.fallbackoutdatedphases = fallback
293 pushop.fallbackoutdatedphases = fallback
294
294
295 @pushdiscovery('obsmarker')
295 @pushdiscovery('obsmarker')
296 def _pushdiscoveryobsmarkers(pushop):
296 def _pushdiscoveryobsmarkers(pushop):
297 if (obsolete._enabled
297 if (obsolete._enabled
298 and pushop.repo.obsstore
298 and pushop.repo.obsstore
299 and 'obsolete' in pushop.remote.listkeys('namespaces')):
299 and 'obsolete' in pushop.remote.listkeys('namespaces')):
300 pushop.outobsmarkers = pushop.repo.obsstore
300 pushop.outobsmarkers = pushop.repo.obsstore
301
301
302 @pushdiscovery('bookmarks')
302 @pushdiscovery('bookmarks')
303 def _pushdiscoverybookmarks(pushop):
303 def _pushdiscoverybookmarks(pushop):
304 ui = pushop.ui
304 ui = pushop.ui
305 repo = pushop.repo.unfiltered()
305 repo = pushop.repo.unfiltered()
306 remote = pushop.remote
306 remote = pushop.remote
307 ui.debug("checking for updated bookmarks\n")
307 ui.debug("checking for updated bookmarks\n")
308 ancestors = ()
308 ancestors = ()
309 if pushop.revs:
309 if pushop.revs:
310 revnums = map(repo.changelog.rev, pushop.revs)
310 revnums = map(repo.changelog.rev, pushop.revs)
311 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
311 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
312 remotebookmark = remote.listkeys('bookmarks')
312 remotebookmark = remote.listkeys('bookmarks')
313
313
314 comp = bookmarks.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
314 comp = bookmarks.compare(repo, repo._bookmarks, remotebookmark, srchex=hex)
315 addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comp
315 addsrc, adddst, advsrc, advdst, diverge, differ, invalid = comp
316 for b, scid, dcid in advsrc:
316 for b, scid, dcid in advsrc:
317 if not ancestors or repo[scid].rev() in ancestors:
317 if not ancestors or repo[scid].rev() in ancestors:
318 pushop.outbookmarks.append((b, dcid, scid))
318 pushop.outbookmarks.append((b, dcid, scid))
319
319
320 def _pushcheckoutgoing(pushop):
320 def _pushcheckoutgoing(pushop):
321 outgoing = pushop.outgoing
321 outgoing = pushop.outgoing
322 unfi = pushop.repo.unfiltered()
322 unfi = pushop.repo.unfiltered()
323 if not outgoing.missing:
323 if not outgoing.missing:
324 # nothing to push
324 # nothing to push
325 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
325 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
326 return False
326 return False
327 # something to push
327 # something to push
328 if not pushop.force:
328 if not pushop.force:
329 # if repo.obsstore == False --> no obsolete
329 # if repo.obsstore == False --> no obsolete
330 # then, save the iteration
330 # then, save the iteration
331 if unfi.obsstore:
331 if unfi.obsstore:
332 # these messages are here because of the 80 char limit
332 # these messages are here because of the 80 char limit
333 mso = _("push includes obsolete changeset: %s!")
333 mso = _("push includes obsolete changeset: %s!")
334 mst = "push includes %s changeset: %s!"
334 mst = "push includes %s changeset: %s!"
335 # plain versions for i18n tool to detect them
335 # plain versions for i18n tool to detect them
336 _("push includes unstable changeset: %s!")
336 _("push includes unstable changeset: %s!")
337 _("push includes bumped changeset: %s!")
337 _("push includes bumped changeset: %s!")
338 _("push includes divergent changeset: %s!")
338 _("push includes divergent changeset: %s!")
339 # If we are going to push and there is at least one
339 # If we are going to push and there is at least one
340 # obsolete or unstable changeset in missing, at
340 # obsolete or unstable changeset in missing, at
341 # least one of the missing heads will be obsolete or
341 # least one of the missing heads will be obsolete or
342 # unstable. So checking heads only is ok
342 # unstable. So checking heads only is ok
343 for node in outgoing.missingheads:
343 for node in outgoing.missingheads:
344 ctx = unfi[node]
344 ctx = unfi[node]
345 if ctx.obsolete():
345 if ctx.obsolete():
346 raise util.Abort(mso % ctx)
346 raise util.Abort(mso % ctx)
347 elif ctx.troubled():
347 elif ctx.troubled():
348 raise util.Abort(_(mst)
348 raise util.Abort(_(mst)
349 % (ctx.troubles()[0],
349 % (ctx.troubles()[0],
350 ctx))
350 ctx))
351 newbm = pushop.ui.configlist('bookmarks', 'pushing')
351 newbm = pushop.ui.configlist('bookmarks', 'pushing')
352 discovery.checkheads(unfi, pushop.remote, outgoing,
352 discovery.checkheads(unfi, pushop.remote, outgoing,
353 pushop.remoteheads,
353 pushop.remoteheads,
354 pushop.newbranch,
354 pushop.newbranch,
355 bool(pushop.incoming),
355 bool(pushop.incoming),
356 newbm)
356 newbm)
357 return True
357 return True
358
358
359 # List of names of steps to perform for an outgoing bundle2, order matters.
359 # List of names of steps to perform for an outgoing bundle2, order matters.
360 b2partsgenorder = []
360 b2partsgenorder = []
361
361
362 # Mapping between step name and function
362 # Mapping between step name and function
363 #
363 #
364 # This exists to help extensions wrap steps if necessary
364 # This exists to help extensions wrap steps if necessary
365 b2partsgenmapping = {}
365 b2partsgenmapping = {}
366
366
367 def b2partsgenerator(stepname):
367 def b2partsgenerator(stepname):
368 """decorator for function generating bundle2 part
368 """decorator for function generating bundle2 part
369
369
370 The function is added to the step -> function mapping and appended to the
370 The function is added to the step -> function mapping and appended to the
371 list of steps. Beware that decorated functions will be added in order
371 list of steps. Beware that decorated functions will be added in order
372 (this may matter).
372 (this may matter).
373
373
374 You can only use this decorator for new steps, if you want to wrap a step
374 You can only use this decorator for new steps, if you want to wrap a step
375 from an extension, attack the b2partsgenmapping dictionary directly."""
375 from an extension, attack the b2partsgenmapping dictionary directly."""
376 def dec(func):
376 def dec(func):
377 assert stepname not in b2partsgenmapping
377 assert stepname not in b2partsgenmapping
378 b2partsgenmapping[stepname] = func
378 b2partsgenmapping[stepname] = func
379 b2partsgenorder.append(stepname)
379 b2partsgenorder.append(stepname)
380 return func
380 return func
381 return dec
381 return dec
382
382
383 @b2partsgenerator('changeset')
383 @b2partsgenerator('changeset')
384 def _pushb2ctx(pushop, bundler):
384 def _pushb2ctx(pushop, bundler):
385 """handle changegroup push through bundle2
385 """handle changegroup push through bundle2
386
386
387 addchangegroup result is stored in the ``pushop.ret`` attribute.
387 addchangegroup result is stored in the ``pushop.ret`` attribute.
388 """
388 """
389 if 'changesets' in pushop.stepsdone:
389 if 'changesets' in pushop.stepsdone:
390 return
390 return
391 pushop.stepsdone.add('changesets')
391 pushop.stepsdone.add('changesets')
392 # Send known heads to the server for race detection.
392 # Send known heads to the server for race detection.
393 if not _pushcheckoutgoing(pushop):
393 if not _pushcheckoutgoing(pushop):
394 return
394 return
395 pushop.repo.prepushoutgoinghooks(pushop.repo,
395 pushop.repo.prepushoutgoinghooks(pushop.repo,
396 pushop.remote,
396 pushop.remote,
397 pushop.outgoing)
397 pushop.outgoing)
398 if not pushop.force:
398 if not pushop.force:
399 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
399 bundler.newpart('B2X:CHECK:HEADS', data=iter(pushop.remoteheads))
400 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
400 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
401 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
401 cgpart = bundler.newpart('B2X:CHANGEGROUP', data=cg.getchunks())
402 def handlereply(op):
402 def handlereply(op):
403 """extract addchangroup returns from server reply"""
403 """extract addchangroup returns from server reply"""
404 cgreplies = op.records.getreplies(cgpart.id)
404 cgreplies = op.records.getreplies(cgpart.id)
405 assert len(cgreplies['changegroup']) == 1
405 assert len(cgreplies['changegroup']) == 1
406 pushop.ret = cgreplies['changegroup'][0]['return']
406 pushop.ret = cgreplies['changegroup'][0]['return']
407 return handlereply
407 return handlereply
408
408
409 @b2partsgenerator('phase')
409 @b2partsgenerator('phase')
410 def _pushb2phases(pushop, bundler):
410 def _pushb2phases(pushop, bundler):
411 """handle phase push through bundle2"""
411 """handle phase push through bundle2"""
412 if 'phases' in pushop.stepsdone:
412 if 'phases' in pushop.stepsdone:
413 return
413 return
414 b2caps = bundle2.bundle2caps(pushop.remote)
414 b2caps = bundle2.bundle2caps(pushop.remote)
415 if not 'b2x:pushkey' in b2caps:
415 if not 'b2x:pushkey' in b2caps:
416 return
416 return
417 pushop.stepsdone.add('phases')
417 pushop.stepsdone.add('phases')
418 part2node = []
418 part2node = []
419 enc = pushkey.encode
419 enc = pushkey.encode
420 for newremotehead in pushop.outdatedphases:
420 for newremotehead in pushop.outdatedphases:
421 part = bundler.newpart('b2x:pushkey')
421 part = bundler.newpart('b2x:pushkey')
422 part.addparam('namespace', enc('phases'))
422 part.addparam('namespace', enc('phases'))
423 part.addparam('key', enc(newremotehead.hex()))
423 part.addparam('key', enc(newremotehead.hex()))
424 part.addparam('old', enc(str(phases.draft)))
424 part.addparam('old', enc(str(phases.draft)))
425 part.addparam('new', enc(str(phases.public)))
425 part.addparam('new', enc(str(phases.public)))
426 part2node.append((part.id, newremotehead))
426 part2node.append((part.id, newremotehead))
427 def handlereply(op):
427 def handlereply(op):
428 for partid, node in part2node:
428 for partid, node in part2node:
429 partrep = op.records.getreplies(partid)
429 partrep = op.records.getreplies(partid)
430 results = partrep['pushkey']
430 results = partrep['pushkey']
431 assert len(results) <= 1
431 assert len(results) <= 1
432 msg = None
432 msg = None
433 if not results:
433 if not results:
434 msg = _('server ignored update of %s to public!\n') % node
434 msg = _('server ignored update of %s to public!\n') % node
435 elif not int(results[0]['return']):
435 elif not int(results[0]['return']):
436 msg = _('updating %s to public failed!\n') % node
436 msg = _('updating %s to public failed!\n') % node
437 if msg is not None:
437 if msg is not None:
438 pushop.ui.warn(msg)
438 pushop.ui.warn(msg)
439 return handlereply
439 return handlereply
440
440
441 @b2partsgenerator('obsmarkers')
442 def _pushb2obsmarkers(pushop, bundler):
443 if 'obsmarkers' in pushop.stepsdone:
444 return
445 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
446 if obsolete.commonversion(remoteversions) is None:
447 return
448 pushop.stepsdone.add('obsmarkers')
449 if pushop.outobsmarkers:
450 buildobsmarkerspart(bundler, pushop.outobsmarkers)
451
441 @b2partsgenerator('bookmarks')
452 @b2partsgenerator('bookmarks')
442 def _pushb2bookmarks(pushop, bundler):
453 def _pushb2bookmarks(pushop, bundler):
443 """handle phase push through bundle2"""
454 """handle phase push through bundle2"""
444 if 'bookmarks' in pushop.stepsdone:
455 if 'bookmarks' in pushop.stepsdone:
445 return
456 return
446 b2caps = bundle2.bundle2caps(pushop.remote)
457 b2caps = bundle2.bundle2caps(pushop.remote)
447 if 'b2x:pushkey' not in b2caps:
458 if 'b2x:pushkey' not in b2caps:
448 return
459 return
449 pushop.stepsdone.add('bookmarks')
460 pushop.stepsdone.add('bookmarks')
450 part2book = []
461 part2book = []
451 enc = pushkey.encode
462 enc = pushkey.encode
452 for book, old, new in pushop.outbookmarks:
463 for book, old, new in pushop.outbookmarks:
453 part = bundler.newpart('b2x:pushkey')
464 part = bundler.newpart('b2x:pushkey')
454 part.addparam('namespace', enc('bookmarks'))
465 part.addparam('namespace', enc('bookmarks'))
455 part.addparam('key', enc(book))
466 part.addparam('key', enc(book))
456 part.addparam('old', enc(old))
467 part.addparam('old', enc(old))
457 part.addparam('new', enc(new))
468 part.addparam('new', enc(new))
458 part2book.append((part.id, book))
469 part2book.append((part.id, book))
459 def handlereply(op):
470 def handlereply(op):
460 for partid, book in part2book:
471 for partid, book in part2book:
461 partrep = op.records.getreplies(partid)
472 partrep = op.records.getreplies(partid)
462 results = partrep['pushkey']
473 results = partrep['pushkey']
463 assert len(results) <= 1
474 assert len(results) <= 1
464 if not results:
475 if not results:
465 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
476 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
466 else:
477 else:
467 ret = int(results[0]['return'])
478 ret = int(results[0]['return'])
468 if ret:
479 if ret:
469 pushop.ui.status(_("updating bookmark %s\n") % book)
480 pushop.ui.status(_("updating bookmark %s\n") % book)
470 else:
481 else:
471 pushop.ui.warn(_('updating bookmark %s failed!\n') % book)
482 pushop.ui.warn(_('updating bookmark %s failed!\n') % book)
472 return handlereply
483 return handlereply
473
484
474
485
475 def _pushbundle2(pushop):
486 def _pushbundle2(pushop):
476 """push data to the remote using bundle2
487 """push data to the remote using bundle2
477
488
478 The only currently supported type of data is changegroup but this will
489 The only currently supported type of data is changegroup but this will
479 evolve in the future."""
490 evolve in the future."""
480 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
491 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
481 # create reply capability
492 # create reply capability
482 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
493 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
483 bundler.newpart('b2x:replycaps', data=capsblob)
494 bundler.newpart('b2x:replycaps', data=capsblob)
484 replyhandlers = []
495 replyhandlers = []
485 for partgenname in b2partsgenorder:
496 for partgenname in b2partsgenorder:
486 partgen = b2partsgenmapping[partgenname]
497 partgen = b2partsgenmapping[partgenname]
487 ret = partgen(pushop, bundler)
498 ret = partgen(pushop, bundler)
488 if callable(ret):
499 if callable(ret):
489 replyhandlers.append(ret)
500 replyhandlers.append(ret)
490 # do not push if nothing to push
501 # do not push if nothing to push
491 if bundler.nbparts <= 1:
502 if bundler.nbparts <= 1:
492 return
503 return
493 stream = util.chunkbuffer(bundler.getchunks())
504 stream = util.chunkbuffer(bundler.getchunks())
494 try:
505 try:
495 reply = pushop.remote.unbundle(stream, ['force'], 'push')
506 reply = pushop.remote.unbundle(stream, ['force'], 'push')
496 except error.BundleValueError, exc:
507 except error.BundleValueError, exc:
497 raise util.Abort('missing support for %s' % exc)
508 raise util.Abort('missing support for %s' % exc)
498 try:
509 try:
499 op = bundle2.processbundle(pushop.repo, reply)
510 op = bundle2.processbundle(pushop.repo, reply)
500 except error.BundleValueError, exc:
511 except error.BundleValueError, exc:
501 raise util.Abort('missing support for %s' % exc)
512 raise util.Abort('missing support for %s' % exc)
502 for rephand in replyhandlers:
513 for rephand in replyhandlers:
503 rephand(op)
514 rephand(op)
504
515
505 def _pushchangeset(pushop):
516 def _pushchangeset(pushop):
506 """Make the actual push of changeset bundle to remote repo"""
517 """Make the actual push of changeset bundle to remote repo"""
507 if 'changesets' in pushop.stepsdone:
518 if 'changesets' in pushop.stepsdone:
508 return
519 return
509 pushop.stepsdone.add('changesets')
520 pushop.stepsdone.add('changesets')
510 if not _pushcheckoutgoing(pushop):
521 if not _pushcheckoutgoing(pushop):
511 return
522 return
512 pushop.repo.prepushoutgoinghooks(pushop.repo,
523 pushop.repo.prepushoutgoinghooks(pushop.repo,
513 pushop.remote,
524 pushop.remote,
514 pushop.outgoing)
525 pushop.outgoing)
515 outgoing = pushop.outgoing
526 outgoing = pushop.outgoing
516 unbundle = pushop.remote.capable('unbundle')
527 unbundle = pushop.remote.capable('unbundle')
517 # TODO: get bundlecaps from remote
528 # TODO: get bundlecaps from remote
518 bundlecaps = None
529 bundlecaps = None
519 # create a changegroup from local
530 # create a changegroup from local
520 if pushop.revs is None and not (outgoing.excluded
531 if pushop.revs is None and not (outgoing.excluded
521 or pushop.repo.changelog.filteredrevs):
532 or pushop.repo.changelog.filteredrevs):
522 # push everything,
533 # push everything,
523 # use the fast path, no race possible on push
534 # use the fast path, no race possible on push
524 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
535 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
525 cg = changegroup.getsubset(pushop.repo,
536 cg = changegroup.getsubset(pushop.repo,
526 outgoing,
537 outgoing,
527 bundler,
538 bundler,
528 'push',
539 'push',
529 fastpath=True)
540 fastpath=True)
530 else:
541 else:
531 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
542 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
532 bundlecaps)
543 bundlecaps)
533
544
534 # apply changegroup to remote
545 # apply changegroup to remote
535 if unbundle:
546 if unbundle:
536 # local repo finds heads on server, finds out what
547 # local repo finds heads on server, finds out what
537 # revs it must push. once revs transferred, if server
548 # revs it must push. once revs transferred, if server
538 # finds it has different heads (someone else won
549 # finds it has different heads (someone else won
539 # commit/push race), server aborts.
550 # commit/push race), server aborts.
540 if pushop.force:
551 if pushop.force:
541 remoteheads = ['force']
552 remoteheads = ['force']
542 else:
553 else:
543 remoteheads = pushop.remoteheads
554 remoteheads = pushop.remoteheads
544 # ssh: return remote's addchangegroup()
555 # ssh: return remote's addchangegroup()
545 # http: return remote's addchangegroup() or 0 for error
556 # http: return remote's addchangegroup() or 0 for error
546 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
557 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
547 pushop.repo.url())
558 pushop.repo.url())
548 else:
559 else:
549 # we return an integer indicating remote head count
560 # we return an integer indicating remote head count
550 # change
561 # change
551 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
562 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
552
563
553 def _pushsyncphase(pushop):
564 def _pushsyncphase(pushop):
554 """synchronise phase information locally and remotely"""
565 """synchronise phase information locally and remotely"""
555 cheads = pushop.commonheads
566 cheads = pushop.commonheads
556 # even when we don't push, exchanging phase data is useful
567 # even when we don't push, exchanging phase data is useful
557 remotephases = pushop.remote.listkeys('phases')
568 remotephases = pushop.remote.listkeys('phases')
558 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
569 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
559 and remotephases # server supports phases
570 and remotephases # server supports phases
560 and pushop.ret is None # nothing was pushed
571 and pushop.ret is None # nothing was pushed
561 and remotephases.get('publishing', False)):
572 and remotephases.get('publishing', False)):
562 # When:
573 # When:
563 # - this is a subrepo push
574 # - this is a subrepo push
564 # - and remote supports phases
575 # - and remote supports phases
565 # - and no changeset was pushed
576 # - and no changeset was pushed
566 # - and remote is publishing
577 # - and remote is publishing
567 # We may be in issue 3871 case!
578 # We may be in issue 3871 case!
568 # We drop the possible phase synchronisation done by
579 # We drop the possible phase synchronisation done by
569 # courtesy to publish changesets possibly locally draft
580 # courtesy to publish changesets possibly locally draft
570 # on the remote.
581 # on the remote.
571 remotephases = {'publishing': 'True'}
582 remotephases = {'publishing': 'True'}
572 if not remotephases: # old server or public only reply from non-publishing
583 if not remotephases: # old server or public only reply from non-publishing
573 _localphasemove(pushop, cheads)
584 _localphasemove(pushop, cheads)
574 # don't push any phase data as there is nothing to push
585 # don't push any phase data as there is nothing to push
575 else:
586 else:
576 ana = phases.analyzeremotephases(pushop.repo, cheads,
587 ana = phases.analyzeremotephases(pushop.repo, cheads,
577 remotephases)
588 remotephases)
578 pheads, droots = ana
589 pheads, droots = ana
579 ### Apply remote phase on local
590 ### Apply remote phase on local
580 if remotephases.get('publishing', False):
591 if remotephases.get('publishing', False):
581 _localphasemove(pushop, cheads)
592 _localphasemove(pushop, cheads)
582 else: # publish = False
593 else: # publish = False
583 _localphasemove(pushop, pheads)
594 _localphasemove(pushop, pheads)
584 _localphasemove(pushop, cheads, phases.draft)
595 _localphasemove(pushop, cheads, phases.draft)
585 ### Apply local phase on remote
596 ### Apply local phase on remote
586
597
587 if pushop.ret:
598 if pushop.ret:
588 if 'phases' in pushop.stepsdone:
599 if 'phases' in pushop.stepsdone:
589 # phases already pushed though bundle2
600 # phases already pushed though bundle2
590 return
601 return
591 outdated = pushop.outdatedphases
602 outdated = pushop.outdatedphases
592 else:
603 else:
593 outdated = pushop.fallbackoutdatedphases
604 outdated = pushop.fallbackoutdatedphases
594
605
595 pushop.stepsdone.add('phases')
606 pushop.stepsdone.add('phases')
596
607
597 # filter heads already turned public by the push
608 # filter heads already turned public by the push
598 outdated = [c for c in outdated if c.node() not in pheads]
609 outdated = [c for c in outdated if c.node() not in pheads]
599 b2caps = bundle2.bundle2caps(pushop.remote)
610 b2caps = bundle2.bundle2caps(pushop.remote)
600 if 'b2x:pushkey' in b2caps:
611 if 'b2x:pushkey' in b2caps:
601 # server supports bundle2, let's do a batched push through it
612 # server supports bundle2, let's do a batched push through it
602 #
613 #
603 # This will eventually be unified with the changesets bundle2 push
614 # This will eventually be unified with the changesets bundle2 push
604 bundler = bundle2.bundle20(pushop.ui, b2caps)
615 bundler = bundle2.bundle20(pushop.ui, b2caps)
605 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
616 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo))
606 bundler.newpart('b2x:replycaps', data=capsblob)
617 bundler.newpart('b2x:replycaps', data=capsblob)
607 part2node = []
618 part2node = []
608 enc = pushkey.encode
619 enc = pushkey.encode
609 for newremotehead in outdated:
620 for newremotehead in outdated:
610 part = bundler.newpart('b2x:pushkey')
621 part = bundler.newpart('b2x:pushkey')
611 part.addparam('namespace', enc('phases'))
622 part.addparam('namespace', enc('phases'))
612 part.addparam('key', enc(newremotehead.hex()))
623 part.addparam('key', enc(newremotehead.hex()))
613 part.addparam('old', enc(str(phases.draft)))
624 part.addparam('old', enc(str(phases.draft)))
614 part.addparam('new', enc(str(phases.public)))
625 part.addparam('new', enc(str(phases.public)))
615 part2node.append((part.id, newremotehead))
626 part2node.append((part.id, newremotehead))
616 stream = util.chunkbuffer(bundler.getchunks())
627 stream = util.chunkbuffer(bundler.getchunks())
617 try:
628 try:
618 reply = pushop.remote.unbundle(stream, ['force'], 'push')
629 reply = pushop.remote.unbundle(stream, ['force'], 'push')
619 op = bundle2.processbundle(pushop.repo, reply)
630 op = bundle2.processbundle(pushop.repo, reply)
620 except error.BundleValueError, exc:
631 except error.BundleValueError, exc:
621 raise util.Abort('missing support for %s' % exc)
632 raise util.Abort('missing support for %s' % exc)
622 for partid, node in part2node:
633 for partid, node in part2node:
623 partrep = op.records.getreplies(partid)
634 partrep = op.records.getreplies(partid)
624 results = partrep['pushkey']
635 results = partrep['pushkey']
625 assert len(results) <= 1
636 assert len(results) <= 1
626 msg = None
637 msg = None
627 if not results:
638 if not results:
628 msg = _('server ignored update of %s to public!\n') % node
639 msg = _('server ignored update of %s to public!\n') % node
629 elif not int(results[0]['return']):
640 elif not int(results[0]['return']):
630 msg = _('updating %s to public failed!\n') % node
641 msg = _('updating %s to public failed!\n') % node
631 if msg is not None:
642 if msg is not None:
632 pushop.ui.warn(msg)
643 pushop.ui.warn(msg)
633
644
634 else:
645 else:
635 # fallback to independent pushkey command
646 # fallback to independent pushkey command
636 for newremotehead in outdated:
647 for newremotehead in outdated:
637 r = pushop.remote.pushkey('phases',
648 r = pushop.remote.pushkey('phases',
638 newremotehead.hex(),
649 newremotehead.hex(),
639 str(phases.draft),
650 str(phases.draft),
640 str(phases.public))
651 str(phases.public))
641 if not r:
652 if not r:
642 pushop.ui.warn(_('updating %s to public failed!\n')
653 pushop.ui.warn(_('updating %s to public failed!\n')
643 % newremotehead)
654 % newremotehead)
644
655
645 def _localphasemove(pushop, nodes, phase=phases.public):
656 def _localphasemove(pushop, nodes, phase=phases.public):
646 """move <nodes> to <phase> in the local source repo"""
657 """move <nodes> to <phase> in the local source repo"""
647 if pushop.locallocked:
658 if pushop.locallocked:
648 tr = pushop.repo.transaction('push-phase-sync')
659 tr = pushop.repo.transaction('push-phase-sync')
649 try:
660 try:
650 phases.advanceboundary(pushop.repo, tr, phase, nodes)
661 phases.advanceboundary(pushop.repo, tr, phase, nodes)
651 tr.close()
662 tr.close()
652 finally:
663 finally:
653 tr.release()
664 tr.release()
654 else:
665 else:
655 # repo is not locked, do not change any phases!
666 # repo is not locked, do not change any phases!
656 # Inform the user that phases should have been moved when
667 # Inform the user that phases should have been moved when
657 # applicable.
668 # applicable.
658 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
669 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
659 phasestr = phases.phasenames[phase]
670 phasestr = phases.phasenames[phase]
660 if actualmoves:
671 if actualmoves:
661 pushop.ui.status(_('cannot lock source repo, skipping '
672 pushop.ui.status(_('cannot lock source repo, skipping '
662 'local %s phase update\n') % phasestr)
673 'local %s phase update\n') % phasestr)
663
674
664 def _pushobsolete(pushop):
675 def _pushobsolete(pushop):
665 """utility function to push obsolete markers to a remote"""
676 """utility function to push obsolete markers to a remote"""
666 if 'obsmarkers' in pushop.stepsdone:
677 if 'obsmarkers' in pushop.stepsdone:
667 return
678 return
668 pushop.ui.debug('try to push obsolete markers to remote\n')
679 pushop.ui.debug('try to push obsolete markers to remote\n')
669 repo = pushop.repo
680 repo = pushop.repo
670 remote = pushop.remote
681 remote = pushop.remote
671 pushop.stepsdone.add('obsmarkers')
682 pushop.stepsdone.add('obsmarkers')
672 if (pushop.outobsmarkers):
683 if (pushop.outobsmarkers):
673 rslts = []
684 rslts = []
674 remotedata = obsolete._pushkeyescape(pushop.outobsmarkers)
685 remotedata = obsolete._pushkeyescape(pushop.outobsmarkers)
675 for key in sorted(remotedata, reverse=True):
686 for key in sorted(remotedata, reverse=True):
676 # reverse sort to ensure we end with dump0
687 # reverse sort to ensure we end with dump0
677 data = remotedata[key]
688 data = remotedata[key]
678 rslts.append(remote.pushkey('obsolete', key, '', data))
689 rslts.append(remote.pushkey('obsolete', key, '', data))
679 if [r for r in rslts if not r]:
690 if [r for r in rslts if not r]:
680 msg = _('failed to push some obsolete markers!\n')
691 msg = _('failed to push some obsolete markers!\n')
681 repo.ui.warn(msg)
692 repo.ui.warn(msg)
682
693
683 def _pushbookmark(pushop):
694 def _pushbookmark(pushop):
684 """Update bookmark position on remote"""
695 """Update bookmark position on remote"""
685 if pushop.ret == 0 or 'bookmarks' in pushop.stepsdone:
696 if pushop.ret == 0 or 'bookmarks' in pushop.stepsdone:
686 return
697 return
687 pushop.stepsdone.add('bookmarks')
698 pushop.stepsdone.add('bookmarks')
688 ui = pushop.ui
699 ui = pushop.ui
689 remote = pushop.remote
700 remote = pushop.remote
690 for b, old, new in pushop.outbookmarks:
701 for b, old, new in pushop.outbookmarks:
691 if remote.pushkey('bookmarks', b, old, new):
702 if remote.pushkey('bookmarks', b, old, new):
692 ui.status(_("updating bookmark %s\n") % b)
703 ui.status(_("updating bookmark %s\n") % b)
693 else:
704 else:
694 ui.warn(_('updating bookmark %s failed!\n') % b)
705 ui.warn(_('updating bookmark %s failed!\n') % b)
695
706
696 class pulloperation(object):
707 class pulloperation(object):
697 """A object that represent a single pull operation
708 """A object that represent a single pull operation
698
709
699 It purpose is to carry push related state and very common operation.
710 It purpose is to carry push related state and very common operation.
700
711
701 A new should be created at the beginning of each pull and discarded
712 A new should be created at the beginning of each pull and discarded
702 afterward.
713 afterward.
703 """
714 """
704
715
705 def __init__(self, repo, remote, heads=None, force=False):
716 def __init__(self, repo, remote, heads=None, force=False):
706 # repo we pull into
717 # repo we pull into
707 self.repo = repo
718 self.repo = repo
708 # repo we pull from
719 # repo we pull from
709 self.remote = remote
720 self.remote = remote
710 # revision we try to pull (None is "all")
721 # revision we try to pull (None is "all")
711 self.heads = heads
722 self.heads = heads
712 # do we force pull?
723 # do we force pull?
713 self.force = force
724 self.force = force
714 # the name of the pull transaction
725 # the name of the pull transaction
715 self._trname = 'pull\n' + util.hidepassword(remote.url())
726 self._trname = 'pull\n' + util.hidepassword(remote.url())
716 # hold the transaction once created
727 # hold the transaction once created
717 self._tr = None
728 self._tr = None
718 # set of common changesets between local and remote before pull
729 # set of common changesets between local and remote before pull
719 self.common = None
730 self.common = None
720 # set of pulled heads
731 # set of pulled heads
721 self.rheads = None
732 self.rheads = None
722 # list of missing changesets to fetch remotely
733 # list of missing changesets to fetch remotely
723 self.fetch = None
734 self.fetch = None
724 # result of changegroup pulling (used as return code by pull)
735 # result of changegroup pulling (used as return code by pull)
725 self.cgresult = None
736 self.cgresult = None
726 # list of steps remaining to do (related to future bundle2 usage)
737 # list of steps remaining to do (related to future bundle2 usage)
727 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
738 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
728
739
729 @util.propertycache
740 @util.propertycache
730 def pulledsubset(self):
741 def pulledsubset(self):
731 """heads of the set of changeset target by the pull"""
742 """heads of the set of changeset target by the pull"""
732 # compute target subset
743 # compute target subset
733 if self.heads is None:
744 if self.heads is None:
734 # We pulled everything possible
745 # We pulled everything possible
735 # sync on everything common
746 # sync on everything common
736 c = set(self.common)
747 c = set(self.common)
737 ret = list(self.common)
748 ret = list(self.common)
738 for n in self.rheads:
749 for n in self.rheads:
739 if n not in c:
750 if n not in c:
740 ret.append(n)
751 ret.append(n)
741 return ret
752 return ret
742 else:
753 else:
743 # We pulled a specific subset
754 # We pulled a specific subset
744 # sync on this subset
755 # sync on this subset
745 return self.heads
756 return self.heads
746
757
747 def gettransaction(self):
758 def gettransaction(self):
748 """get appropriate pull transaction, creating it if needed"""
759 """get appropriate pull transaction, creating it if needed"""
749 if self._tr is None:
760 if self._tr is None:
750 self._tr = self.repo.transaction(self._trname)
761 self._tr = self.repo.transaction(self._trname)
751 return self._tr
762 return self._tr
752
763
753 def closetransaction(self):
764 def closetransaction(self):
754 """close transaction if created"""
765 """close transaction if created"""
755 if self._tr is not None:
766 if self._tr is not None:
756 self._tr.close()
767 self._tr.close()
757
768
758 def releasetransaction(self):
769 def releasetransaction(self):
759 """release transaction if created"""
770 """release transaction if created"""
760 if self._tr is not None:
771 if self._tr is not None:
761 self._tr.release()
772 self._tr.release()
762
773
763 def pull(repo, remote, heads=None, force=False):
774 def pull(repo, remote, heads=None, force=False):
764 pullop = pulloperation(repo, remote, heads, force)
775 pullop = pulloperation(repo, remote, heads, force)
765 if pullop.remote.local():
776 if pullop.remote.local():
766 missing = set(pullop.remote.requirements) - pullop.repo.supported
777 missing = set(pullop.remote.requirements) - pullop.repo.supported
767 if missing:
778 if missing:
768 msg = _("required features are not"
779 msg = _("required features are not"
769 " supported in the destination:"
780 " supported in the destination:"
770 " %s") % (', '.join(sorted(missing)))
781 " %s") % (', '.join(sorted(missing)))
771 raise util.Abort(msg)
782 raise util.Abort(msg)
772
783
773 lock = pullop.repo.lock()
784 lock = pullop.repo.lock()
774 try:
785 try:
775 _pulldiscovery(pullop)
786 _pulldiscovery(pullop)
776 if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
787 if (pullop.repo.ui.configbool('experimental', 'bundle2-exp', False)
777 and pullop.remote.capable('bundle2-exp')):
788 and pullop.remote.capable('bundle2-exp')):
778 _pullbundle2(pullop)
789 _pullbundle2(pullop)
779 if 'changegroup' in pullop.todosteps:
790 if 'changegroup' in pullop.todosteps:
780 _pullchangeset(pullop)
791 _pullchangeset(pullop)
781 if 'phases' in pullop.todosteps:
792 if 'phases' in pullop.todosteps:
782 _pullphase(pullop)
793 _pullphase(pullop)
783 if 'obsmarkers' in pullop.todosteps:
794 if 'obsmarkers' in pullop.todosteps:
784 _pullobsolete(pullop)
795 _pullobsolete(pullop)
785 pullop.closetransaction()
796 pullop.closetransaction()
786 finally:
797 finally:
787 pullop.releasetransaction()
798 pullop.releasetransaction()
788 lock.release()
799 lock.release()
789
800
790 return pullop.cgresult
801 return pullop.cgresult
791
802
792 def _pulldiscovery(pullop):
803 def _pulldiscovery(pullop):
793 """discovery phase for the pull
804 """discovery phase for the pull
794
805
795 Currently handles changeset discovery only; will change to handle all discovery
806 Currently handles changeset discovery only; will change to handle all discovery
796 at some point."""
807 at some point."""
797 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
808 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
798 pullop.remote,
809 pullop.remote,
799 heads=pullop.heads,
810 heads=pullop.heads,
800 force=pullop.force)
811 force=pullop.force)
801 pullop.common, pullop.fetch, pullop.rheads = tmp
812 pullop.common, pullop.fetch, pullop.rheads = tmp
802
813
803 def _pullbundle2(pullop):
814 def _pullbundle2(pullop):
804 """pull data using bundle2
815 """pull data using bundle2
805
816
806 For now, the only supported data are changegroup."""
817 For now, the only supported data are changegroup."""
807 remotecaps = bundle2.bundle2caps(pullop.remote)
818 remotecaps = bundle2.bundle2caps(pullop.remote)
808 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
819 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
809 # pulling changegroup
820 # pulling changegroup
810 pullop.todosteps.remove('changegroup')
821 pullop.todosteps.remove('changegroup')
811
822
812 kwargs['common'] = pullop.common
823 kwargs['common'] = pullop.common
813 kwargs['heads'] = pullop.heads or pullop.rheads
824 kwargs['heads'] = pullop.heads or pullop.rheads
814 if 'b2x:listkeys' in remotecaps:
825 if 'b2x:listkeys' in remotecaps:
815 kwargs['listkeys'] = ['phase']
826 kwargs['listkeys'] = ['phase']
816 if not pullop.fetch:
827 if not pullop.fetch:
817 pullop.repo.ui.status(_("no changes found\n"))
828 pullop.repo.ui.status(_("no changes found\n"))
818 pullop.cgresult = 0
829 pullop.cgresult = 0
819 else:
830 else:
820 if pullop.heads is None and list(pullop.common) == [nullid]:
831 if pullop.heads is None and list(pullop.common) == [nullid]:
821 pullop.repo.ui.status(_("requesting all changes\n"))
832 pullop.repo.ui.status(_("requesting all changes\n"))
822 _pullbundle2extraprepare(pullop, kwargs)
833 _pullbundle2extraprepare(pullop, kwargs)
823 if kwargs.keys() == ['format']:
834 if kwargs.keys() == ['format']:
824 return # nothing to pull
835 return # nothing to pull
825 bundle = pullop.remote.getbundle('pull', **kwargs)
836 bundle = pullop.remote.getbundle('pull', **kwargs)
826 try:
837 try:
827 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
838 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
828 except error.BundleValueError, exc:
839 except error.BundleValueError, exc:
829 raise util.Abort('missing support for %s' % exc)
840 raise util.Abort('missing support for %s' % exc)
830
841
831 if pullop.fetch:
842 if pullop.fetch:
832 assert len(op.records['changegroup']) == 1
843 assert len(op.records['changegroup']) == 1
833 pullop.cgresult = op.records['changegroup'][0]['return']
844 pullop.cgresult = op.records['changegroup'][0]['return']
834
845
835 # processing phases change
846 # processing phases change
836 for namespace, value in op.records['listkeys']:
847 for namespace, value in op.records['listkeys']:
837 if namespace == 'phases':
848 if namespace == 'phases':
838 _pullapplyphases(pullop, value)
849 _pullapplyphases(pullop, value)
839
850
840 def _pullbundle2extraprepare(pullop, kwargs):
851 def _pullbundle2extraprepare(pullop, kwargs):
841 """hook function so that extensions can extend the getbundle call"""
852 """hook function so that extensions can extend the getbundle call"""
842 pass
853 pass
843
854
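# A minimal sketch of how an extension might use this hook; the extension name
# and the extra getbundle argument are hypothetical (the server would also
# need to understand the argument):
#
#   from mercurial import extensions, exchange
#
#   def _myextraprepare(orig, pullop, kwargs):
#       orig(pullop, kwargs)
#       kwargs['myextensionarg'] = '1'  # forwarded to remote.getbundle()
#
#   def extsetup(ui):
#       extensions.wrapfunction(exchange, '_pullbundle2extraprepare',
#                               _myextraprepare)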
844 def _pullchangeset(pullop):
855 def _pullchangeset(pullop):
845 """pull changeset from unbundle into the local repo"""
856 """pull changeset from unbundle into the local repo"""
846 # We delay opening the transaction as long as possible so we don't
857 # We delay opening the transaction as long as possible so we don't
847 # open one for nothing, which would break a future useful
858 # open one for nothing, which would break a future useful
848 # rollback call
859 # rollback call
849 pullop.todosteps.remove('changegroup')
860 pullop.todosteps.remove('changegroup')
850 if not pullop.fetch:
861 if not pullop.fetch:
851 pullop.repo.ui.status(_("no changes found\n"))
862 pullop.repo.ui.status(_("no changes found\n"))
852 pullop.cgresult = 0
863 pullop.cgresult = 0
853 return
864 return
854 pullop.gettransaction()
865 pullop.gettransaction()
855 if pullop.heads is None and list(pullop.common) == [nullid]:
866 if pullop.heads is None and list(pullop.common) == [nullid]:
856 pullop.repo.ui.status(_("requesting all changes\n"))
867 pullop.repo.ui.status(_("requesting all changes\n"))
857 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
868 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
858 # issue1320, avoid a race if remote changed after discovery
869 # issue1320, avoid a race if remote changed after discovery
859 pullop.heads = pullop.rheads
870 pullop.heads = pullop.rheads
860
871
861 if pullop.remote.capable('getbundle'):
872 if pullop.remote.capable('getbundle'):
862 # TODO: get bundlecaps from remote
873 # TODO: get bundlecaps from remote
863 cg = pullop.remote.getbundle('pull', common=pullop.common,
874 cg = pullop.remote.getbundle('pull', common=pullop.common,
864 heads=pullop.heads or pullop.rheads)
875 heads=pullop.heads or pullop.rheads)
865 elif pullop.heads is None:
876 elif pullop.heads is None:
866 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
877 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
867 elif not pullop.remote.capable('changegroupsubset'):
878 elif not pullop.remote.capable('changegroupsubset'):
868 raise util.Abort(_("partial pull cannot be done because "
879 raise util.Abort(_("partial pull cannot be done because "
869 "other repository doesn't support "
880 "other repository doesn't support "
870 "changegroupsubset."))
881 "changegroupsubset."))
871 else:
882 else:
872 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
883 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
873 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
884 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
874 pullop.remote.url())
885 pullop.remote.url())
875
886
876 def _pullphase(pullop):
887 def _pullphase(pullop):
877 # Get remote phases data from remote
888 # Get remote phases data from remote
878 remotephases = pullop.remote.listkeys('phases')
889 remotephases = pullop.remote.listkeys('phases')
879 _pullapplyphases(pullop, remotephases)
890 _pullapplyphases(pullop, remotephases)
880
891
881 def _pullapplyphases(pullop, remotephases):
892 def _pullapplyphases(pullop, remotephases):
882 """apply phase movement from observed remote state"""
893 """apply phase movement from observed remote state"""
883 pullop.todosteps.remove('phases')
894 pullop.todosteps.remove('phases')
884 publishing = bool(remotephases.get('publishing', False))
895 publishing = bool(remotephases.get('publishing', False))
885 if remotephases and not publishing:
896 if remotephases and not publishing:
886 # remote is new and unpublishing
897 # remote is new and unpublishing
887 pheads, _dr = phases.analyzeremotephases(pullop.repo,
898 pheads, _dr = phases.analyzeremotephases(pullop.repo,
888 pullop.pulledsubset,
899 pullop.pulledsubset,
889 remotephases)
900 remotephases)
890 dheads = pullop.pulledsubset
901 dheads = pullop.pulledsubset
891 else:
902 else:
892 # Remote is old or publishing all common changesets
903 # Remote is old or publishing all common changesets
893 # should be seen as public
904 # should be seen as public
894 pheads = pullop.pulledsubset
905 pheads = pullop.pulledsubset
895 dheads = []
906 dheads = []
896 unfi = pullop.repo.unfiltered()
907 unfi = pullop.repo.unfiltered()
897 phase = unfi._phasecache.phase
908 phase = unfi._phasecache.phase
898 rev = unfi.changelog.nodemap.get
909 rev = unfi.changelog.nodemap.get
899 public = phases.public
910 public = phases.public
900 draft = phases.draft
911 draft = phases.draft
901
912
902 # exclude changesets already public locally and update the others
913 # exclude changesets already public locally and update the others
903 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
914 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
904 if pheads:
915 if pheads:
905 tr = pullop.gettransaction()
916 tr = pullop.gettransaction()
906 phases.advanceboundary(pullop.repo, tr, public, pheads)
917 phases.advanceboundary(pullop.repo, tr, public, pheads)
907
918
908 # exclude changesets already draft locally and update the others
919 # exclude changesets already draft locally and update the others
909 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
920 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
910 if dheads:
921 if dheads:
911 tr = pullop.gettransaction()
922 tr = pullop.gettransaction()
912 phases.advanceboundary(pullop.repo, tr, draft, dheads)
923 phases.advanceboundary(pullop.repo, tr, draft, dheads)
913
924
914 def _pullobsolete(pullop):
925 def _pullobsolete(pullop):
915 """utility function to pull obsolete markers from a remote
926 """utility function to pull obsolete markers from a remote
916
927
917 `gettransaction` is a function that returns the pull transaction, creating
928 `gettransaction` is a function that returns the pull transaction, creating
918 one if necessary. We return the transaction to inform the calling code that
929 one if necessary. We return the transaction to inform the calling code that
919 a new transaction has been created (when applicable).
930 a new transaction has been created (when applicable).
920
931
921 Exists mostly to allow overriding for experimentation purposes."""
932 Exists mostly to allow overriding for experimentation purposes."""
922 pullop.todosteps.remove('obsmarkers')
933 pullop.todosteps.remove('obsmarkers')
923 tr = None
934 tr = None
924 if obsolete._enabled:
935 if obsolete._enabled:
925 pullop.repo.ui.debug('fetching remote obsolete markers\n')
936 pullop.repo.ui.debug('fetching remote obsolete markers\n')
926 remoteobs = pullop.remote.listkeys('obsolete')
937 remoteobs = pullop.remote.listkeys('obsolete')
927 if 'dump0' in remoteobs:
938 if 'dump0' in remoteobs:
928 tr = pullop.gettransaction()
939 tr = pullop.gettransaction()
929 for key in sorted(remoteobs, reverse=True):
940 for key in sorted(remoteobs, reverse=True):
930 if key.startswith('dump'):
941 if key.startswith('dump'):
931 data = base85.b85decode(remoteobs[key])
942 data = base85.b85decode(remoteobs[key])
932 pullop.repo.obsstore.mergemarkers(tr, data)
943 pullop.repo.obsstore.mergemarkers(tr, data)
933 pullop.repo.invalidatevolatilesets()
944 pullop.repo.invalidatevolatilesets()
934 return tr
945 return tr
935
946
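# The docstring above mentions overriding for experimentation; a hypothetical
# extension could, for instance, replace the whole step to skip marker
# exchange entirely:
#
#   from mercurial import exchange
#
#   def _skipobsolete(pullop):
#       pullop.todosteps.remove('obsmarkers')
#       return None  # fetch no markers, create no transaction
#
#   def extsetup(ui):
#       exchange._pullobsolete = _skipobsolete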
936 def caps20to10(repo):
947 def caps20to10(repo):
937 """return a set with appropriate options to use bundle20 during getbundle"""
948 """return a set with appropriate options to use bundle20 during getbundle"""
938 caps = set(['HG2X'])
949 caps = set(['HG2X'])
939 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
950 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
940 caps.add('bundle2=' + urllib.quote(capsblob))
951 caps.add('bundle2=' + urllib.quote(capsblob))
941 return caps
952 return caps
942
953
943 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
954 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
944 **kwargs):
955 **kwargs):
945 """return a full bundle (with potentially multiple kind of parts)
956 """return a full bundle (with potentially multiple kind of parts)
946
957
947 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
958 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
948 passed. For now, the bundle can contain only changegroup, but this will
959 passed. For now, the bundle can contain only changegroup, but this will
949 changes when more part type will be available for bundle2.
960 changes when more part type will be available for bundle2.
950
961
951 This is different from changegroup.getbundle that only returns an HG10
962 This is different from changegroup.getbundle that only returns an HG10
952 changegroup bundle. They may eventually get reunited in the future when we
963 changegroup bundle. They may eventually get reunited in the future when we
953 have a clearer idea of the API we what to query different data.
964 have a clearer idea of the API we what to query different data.
954
965
955 The implementation is at a very early stage and will get massive rework
966 The implementation is at a very early stage and will get massive rework
956 when the API of bundle is refined.
967 when the API of bundle is refined.
957 """
968 """
958 cg = None
969 cg = None
959 if kwargs.get('cg', True):
970 if kwargs.get('cg', True):
960 # build changegroup bundle here.
971 # build changegroup bundle here.
961 cg = changegroup.getbundle(repo, source, heads=heads,
972 cg = changegroup.getbundle(repo, source, heads=heads,
962 common=common, bundlecaps=bundlecaps)
973 common=common, bundlecaps=bundlecaps)
963 elif 'HG2X' not in bundlecaps:
974 elif 'HG2X' not in bundlecaps:
964 raise ValueError(_('request for bundle10 must include changegroup'))
975 raise ValueError(_('request for bundle10 must include changegroup'))
965 if bundlecaps is None or 'HG2X' not in bundlecaps:
976 if bundlecaps is None or 'HG2X' not in bundlecaps:
966 if kwargs:
977 if kwargs:
967 raise ValueError(_('unsupported getbundle arguments: %s')
978 raise ValueError(_('unsupported getbundle arguments: %s')
968 % ', '.join(sorted(kwargs.keys())))
979 % ', '.join(sorted(kwargs.keys())))
969 return cg
980 return cg
970 # very crude first implementation,
981 # very crude first implementation,
971 # the bundle API will change and the generation will be done lazily.
982 # the bundle API will change and the generation will be done lazily.
972 b2caps = {}
983 b2caps = {}
973 for bcaps in bundlecaps:
984 for bcaps in bundlecaps:
974 if bcaps.startswith('bundle2='):
985 if bcaps.startswith('bundle2='):
975 blob = urllib.unquote(bcaps[len('bundle2='):])
986 blob = urllib.unquote(bcaps[len('bundle2='):])
976 b2caps.update(bundle2.decodecaps(blob))
987 b2caps.update(bundle2.decodecaps(blob))
977 bundler = bundle2.bundle20(repo.ui, b2caps)
988 bundler = bundle2.bundle20(repo.ui, b2caps)
978 if cg:
989 if cg:
979 bundler.newpart('b2x:changegroup', data=cg.getchunks())
990 bundler.newpart('b2x:changegroup', data=cg.getchunks())
980 listkeys = kwargs.get('listkeys', ())
991 listkeys = kwargs.get('listkeys', ())
981 for namespace in listkeys:
992 for namespace in listkeys:
982 part = bundler.newpart('b2x:listkeys')
993 part = bundler.newpart('b2x:listkeys')
983 part.addparam('namespace', namespace)
994 part.addparam('namespace', namespace)
984 keys = repo.listkeys(namespace).items()
995 keys = repo.listkeys(namespace).items()
985 part.data = pushkey.encodekeys(keys)
996 part.data = pushkey.encodekeys(keys)
986 _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
997 _getbundleextrapart(bundler, repo, source, heads=heads, common=common,
987 bundlecaps=bundlecaps, **kwargs)
998 bundlecaps=bundlecaps, **kwargs)
988 return util.chunkbuffer(bundler.getchunks())
999 return util.chunkbuffer(bundler.getchunks())
989
1000
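# A hedged sketch of a direct call (values illustrative): requesting an HG2X
# stream that carries both a changegroup and a 'phase' listkeys part.
#
#   caps = caps20to10(repo)  # roughly set(['HG2X', 'bundle2=<quoted caps blob>'])
#   stream = getbundle(repo, 'pull', heads=repo.heads(), common=[nullid],
#                      bundlecaps=caps, listkeys=['phase'])
#   data = stream.read()     # raw 'HG2X...' bytes ready to send to a client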
990 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
1001 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
991 bundlecaps=None, **kwargs):
1002 bundlecaps=None, **kwargs):
992 """hook function to let extensions add parts to the requested bundle"""
1003 """hook function to let extensions add parts to the requested bundle"""
993 pass
1004 pass
994
1005
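# A minimal sketch of an extension using this hook to ship one extra advisory
# part; the part name and payload are hypothetical:
#
#   from mercurial import extensions, exchange
#
#   def _addmypart(orig, bundler, repo, source, **kwargs):
#       orig(bundler, repo, source, **kwargs)
#       bundler.newpart('b2x:myextensionpart', data='hello from the server')
#
#   def extsetup(ui):
#       extensions.wrapfunction(exchange, '_getbundleextrapart', _addmypart)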
995 def check_heads(repo, their_heads, context):
1006 def check_heads(repo, their_heads, context):
996 """check if the heads of a repo have been modified
1007 """check if the heads of a repo have been modified
997
1008
998 Used by peers for unbundling.
1009 Used by peers for unbundling.
999 """
1010 """
1000 heads = repo.heads()
1011 heads = repo.heads()
1001 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1012 heads_hash = util.sha1(''.join(sorted(heads))).digest()
1002 if not (their_heads == ['force'] or their_heads == heads or
1013 if not (their_heads == ['force'] or their_heads == heads or
1003 their_heads == ['hashed', heads_hash]):
1014 their_heads == ['hashed', heads_hash]):
1004 # someone else committed/pushed/unbundled while we
1015 # someone else committed/pushed/unbundled while we
1005 # were transferring data
1016 # were transferring data
1006 raise error.PushRaced('repository changed while %s - '
1017 raise error.PushRaced('repository changed while %s - '
1007 'please try again' % context)
1018 'please try again' % context)
1008
1019
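# For illustration, the 'hashed' form accepted above would be produced on the
# sending side roughly like this (a sketch of the expected companion code):
#
#   expected = remote_heads_seen_during_discovery  # list of binary node ids
#   heads_arg = ['hashed', util.sha1(''.join(sorted(expected))).digest()]
#   # ... the bundle is then transferred and check_heads() runs on this side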
1009 def unbundle(repo, cg, heads, source, url):
1020 def unbundle(repo, cg, heads, source, url):
1010 """Apply a bundle to a repo.
1021 """Apply a bundle to a repo.
1011
1022
1012 This function makes sure the repo is locked during the application and has
1023 This function makes sure the repo is locked during the application and has
1013 a mechanism to check that no push race occurred between the creation of the
1024 a mechanism to check that no push race occurred between the creation of the
1014 bundle and its application.
1025 bundle and its application.
1015
1026
1016 If the push was raced, a PushRaced exception is raised."""
1027 If the push was raced, a PushRaced exception is raised."""
1017 r = 0
1028 r = 0
1018 # need a transaction when processing a bundle2 stream
1029 # need a transaction when processing a bundle2 stream
1019 tr = None
1030 tr = None
1020 lock = repo.lock()
1031 lock = repo.lock()
1021 try:
1032 try:
1022 check_heads(repo, heads, 'uploading changes')
1033 check_heads(repo, heads, 'uploading changes')
1023 # push can proceed
1034 # push can proceed
1024 if util.safehasattr(cg, 'params'):
1035 if util.safehasattr(cg, 'params'):
1025 try:
1036 try:
1026 tr = repo.transaction('unbundle')
1037 tr = repo.transaction('unbundle')
1027 tr.hookargs['bundle2-exp'] = '1'
1038 tr.hookargs['bundle2-exp'] = '1'
1028 r = bundle2.processbundle(repo, cg, lambda: tr).reply
1039 r = bundle2.processbundle(repo, cg, lambda: tr).reply
1029 cl = repo.unfiltered().changelog
1040 cl = repo.unfiltered().changelog
1030 p = cl.writepending() and repo.root or ""
1041 p = cl.writepending() and repo.root or ""
1031 repo.hook('b2x-pretransactionclose', throw=True, source=source,
1042 repo.hook('b2x-pretransactionclose', throw=True, source=source,
1032 url=url, pending=p, **tr.hookargs)
1043 url=url, pending=p, **tr.hookargs)
1033 tr.close()
1044 tr.close()
1034 repo.hook('b2x-transactionclose', source=source, url=url,
1045 repo.hook('b2x-transactionclose', source=source, url=url,
1035 **tr.hookargs)
1046 **tr.hookargs)
1036 except Exception, exc:
1047 except Exception, exc:
1037 exc.duringunbundle2 = True
1048 exc.duringunbundle2 = True
1038 raise
1049 raise
1039 else:
1050 else:
1040 r = changegroup.addchangegroup(repo, cg, source, url)
1051 r = changegroup.addchangegroup(repo, cg, source, url)
1041 finally:
1052 finally:
1042 if tr is not None:
1053 if tr is not None:
1043 tr.release()
1054 tr.release()
1044 lock.release()
1055 lock.release()
1045 return r
1056 return r
@@ -1,1202 +1,1203
1
1
2 $ getmainid() {
2 $ getmainid() {
3 > hg -R main log --template '{node}\n' --rev "$1"
3 > hg -R main log --template '{node}\n' --rev "$1"
4 > }
4 > }
5
5
6 Create an extension to test bundle2 API
6 Create an extension to test bundle2 API
7
7
8 $ cat > bundle2.py << EOF
8 $ cat > bundle2.py << EOF
9 > """A small extension to test bundle2 implementation
9 > """A small extension to test bundle2 implementation
10 >
10 >
11 > The current bundle2 implementation is far too limited to be used in any core
11 > The current bundle2 implementation is far too limited to be used in any core
12 > code. We still need to be able to test it while it grows up.
12 > code. We still need to be able to test it while it grows up.
13 > """
13 > """
14 >
14 >
15 > import sys, os
15 > import sys, os
16 > from mercurial import cmdutil
16 > from mercurial import cmdutil
17 > from mercurial import util
17 > from mercurial import util
18 > from mercurial import bundle2
18 > from mercurial import bundle2
19 > from mercurial import scmutil
19 > from mercurial import scmutil
20 > from mercurial import discovery
20 > from mercurial import discovery
21 > from mercurial import changegroup
21 > from mercurial import changegroup
22 > from mercurial import error
22 > from mercurial import error
23 > from mercurial import obsolete
23 > from mercurial import obsolete
24 >
24 >
25 > obsolete._enabled = True
25 > obsolete._enabled = True
26 >
26 >
27 > try:
27 > try:
28 > import msvcrt
28 > import msvcrt
29 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
29 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
30 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
30 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
31 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
31 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
32 > except ImportError:
32 > except ImportError:
33 > pass
33 > pass
34 >
34 >
35 > cmdtable = {}
35 > cmdtable = {}
36 > command = cmdutil.command(cmdtable)
36 > command = cmdutil.command(cmdtable)
37 >
37 >
38 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
38 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
39 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
39 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
40 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
40 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
41 > assert len(ELEPHANTSSONG) == 178 # future tests say 178 bytes, trust it.
41 > assert len(ELEPHANTSSONG) == 178 # future tests say 178 bytes, trust it.
42 >
42 >
43 > @bundle2.parthandler('test:song')
43 > @bundle2.parthandler('test:song')
44 > def songhandler(op, part):
44 > def songhandler(op, part):
45 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
45 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
46 > op.ui.write('The choir starts singing:\n')
46 > op.ui.write('The choir starts singing:\n')
47 > verses = 0
47 > verses = 0
48 > for line in part.read().split('\n'):
48 > for line in part.read().split('\n'):
49 > op.ui.write(' %s\n' % line)
49 > op.ui.write(' %s\n' % line)
50 > verses += 1
50 > verses += 1
51 > op.records.add('song', {'verses': verses})
51 > op.records.add('song', {'verses': verses})
52 >
52 >
53 > @bundle2.parthandler('test:ping')
53 > @bundle2.parthandler('test:ping')
54 > def pinghandler(op, part):
54 > def pinghandler(op, part):
55 > op.ui.write('received ping request (id %i)\n' % part.id)
55 > op.ui.write('received ping request (id %i)\n' % part.id)
56 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
56 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
57 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
57 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
58 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))])
58 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))])
59 >
59 >
60 > @bundle2.parthandler('test:debugreply')
60 > @bundle2.parthandler('test:debugreply')
61 > def debugreply(op, part):
61 > def debugreply(op, part):
62 > """print data about the capacity of the bundle reply"""
62 > """print data about the capacity of the bundle reply"""
63 > if op.reply is None:
63 > if op.reply is None:
64 > op.ui.write('debugreply: no reply\n')
64 > op.ui.write('debugreply: no reply\n')
65 > else:
65 > else:
66 > op.ui.write('debugreply: capabilities:\n')
66 > op.ui.write('debugreply: capabilities:\n')
67 > for cap in sorted(op.reply.capabilities):
67 > for cap in sorted(op.reply.capabilities):
68 > op.ui.write('debugreply: %r\n' % cap)
68 > op.ui.write('debugreply: %r\n' % cap)
69 > for val in op.reply.capabilities[cap]:
69 > for val in op.reply.capabilities[cap]:
70 > op.ui.write('debugreply: %r\n' % val)
70 > op.ui.write('debugreply: %r\n' % val)
71 >
71 >
72 > @command('bundle2',
72 > @command('bundle2',
73 > [('', 'param', [], 'stream level parameter'),
73 > [('', 'param', [], 'stream level parameter'),
74 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
74 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
75 > ('', 'unknownparams', False, 'include an unknown part parameter in the bundle'),
75 > ('', 'unknownparams', False, 'include an unknown part parameter in the bundle'),
76 > ('', 'parts', False, 'include some arbitrary parts in the bundle'),
76 > ('', 'parts', False, 'include some arbitrary parts in the bundle'),
77 > ('', 'reply', False, 'produce a reply bundle'),
77 > ('', 'reply', False, 'produce a reply bundle'),
78 > ('', 'pushrace', False, 'include a check:heads part with unknown nodes'),
78 > ('', 'pushrace', False, 'include a check:heads part with unknown nodes'),
79 > ('r', 'rev', [], 'include those changesets in the bundle'),],
79 > ('r', 'rev', [], 'include those changesets in the bundle'),],
80 > '[OUTPUTFILE]')
80 > '[OUTPUTFILE]')
81 > def cmdbundle2(ui, repo, path=None, **opts):
81 > def cmdbundle2(ui, repo, path=None, **opts):
82 > """write a bundle2 container on standard ouput"""
82 > """write a bundle2 container on standard ouput"""
83 > bundler = bundle2.bundle20(ui)
83 > bundler = bundle2.bundle20(ui)
84 > for p in opts['param']:
84 > for p in opts['param']:
85 > p = p.split('=', 1)
85 > p = p.split('=', 1)
86 > try:
86 > try:
87 > bundler.addparam(*p)
87 > bundler.addparam(*p)
88 > except ValueError, exc:
88 > except ValueError, exc:
89 > raise util.Abort('%s' % exc)
89 > raise util.Abort('%s' % exc)
90 >
90 >
91 > if opts['reply']:
91 > if opts['reply']:
92 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
92 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
93 > bundler.newpart('b2x:replycaps', data=capsstring)
93 > bundler.newpart('b2x:replycaps', data=capsstring)
94 >
94 >
95 > if opts['pushrace']:
95 > if opts['pushrace']:
96 > # also serves to test the assignment of data outside of init
96 > # also serves to test the assignment of data outside of init
97 > part = bundler.newpart('b2x:check:heads')
97 > part = bundler.newpart('b2x:check:heads')
98 > part.data = '01234567890123456789'
98 > part.data = '01234567890123456789'
99 >
99 >
100 > revs = opts['rev']
100 > revs = opts['rev']
101 > if 'rev' in opts:
101 > if 'rev' in opts:
102 > revs = scmutil.revrange(repo, opts['rev'])
102 > revs = scmutil.revrange(repo, opts['rev'])
103 > if revs:
103 > if revs:
104 > # very crude version of a changegroup part creation
104 > # very crude version of a changegroup part creation
105 > bundled = repo.revs('%ld::%ld', revs, revs)
105 > bundled = repo.revs('%ld::%ld', revs, revs)
106 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
106 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
107 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
107 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
108 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
108 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
109 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
109 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
110 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
110 > bundler.newpart('b2x:changegroup', data=cg.getchunks())
111 >
111 >
112 > if opts['parts']:
112 > if opts['parts']:
113 > bundler.newpart('test:empty')
113 > bundler.newpart('test:empty')
114 > # add a second one to make sure we handle multiple parts
114 > # add a second one to make sure we handle multiple parts
115 > bundler.newpart('test:empty')
115 > bundler.newpart('test:empty')
116 > bundler.newpart('test:song', data=ELEPHANTSSONG)
116 > bundler.newpart('test:song', data=ELEPHANTSSONG)
117 > bundler.newpart('test:debugreply')
117 > bundler.newpart('test:debugreply')
118 > mathpart = bundler.newpart('test:math')
118 > mathpart = bundler.newpart('test:math')
119 > mathpart.addparam('pi', '3.14')
119 > mathpart.addparam('pi', '3.14')
120 > mathpart.addparam('e', '2.72')
120 > mathpart.addparam('e', '2.72')
121 > mathpart.addparam('cooking', 'raw', mandatory=False)
121 > mathpart.addparam('cooking', 'raw', mandatory=False)
122 > mathpart.data = '42'
122 > mathpart.data = '42'
123 > # advisory known part with unknown mandatory param
123 > # advisory known part with unknown mandatory param
124 > bundler.newpart('test:song', [('randomparam','')])
124 > bundler.newpart('test:song', [('randomparam','')])
125 > if opts['unknown']:
125 > if opts['unknown']:
126 > bundler.newpart('test:UNKNOWN', data='some random content')
126 > bundler.newpart('test:UNKNOWN', data='some random content')
127 > if opts['unknownparams']:
127 > if opts['unknownparams']:
128 > bundler.newpart('test:SONG', [('randomparams', '')])
128 > bundler.newpart('test:SONG', [('randomparams', '')])
129 > if opts['parts']:
129 > if opts['parts']:
130 > bundler.newpart('test:ping')
130 > bundler.newpart('test:ping')
131 >
131 >
132 > if path is None:
132 > if path is None:
133 > file = sys.stdout
133 > file = sys.stdout
134 > else:
134 > else:
135 > file = open(path, 'wb')
135 > file = open(path, 'wb')
136 >
136 >
137 > for chunk in bundler.getchunks():
137 > for chunk in bundler.getchunks():
138 > file.write(chunk)
138 > file.write(chunk)
139 >
139 >
140 > @command('unbundle2', [], '')
140 > @command('unbundle2', [], '')
141 > def cmdunbundle2(ui, repo, replypath=None):
141 > def cmdunbundle2(ui, repo, replypath=None):
142 > """process a bundle2 stream from stdin on the current repo"""
142 > """process a bundle2 stream from stdin on the current repo"""
143 > try:
143 > try:
144 > tr = None
144 > tr = None
145 > lock = repo.lock()
145 > lock = repo.lock()
146 > tr = repo.transaction('processbundle')
146 > tr = repo.transaction('processbundle')
147 > try:
147 > try:
148 > unbundler = bundle2.unbundle20(ui, sys.stdin)
148 > unbundler = bundle2.unbundle20(ui, sys.stdin)
149 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
149 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
150 > tr.close()
150 > tr.close()
151 > except error.BundleValueError, exc:
151 > except error.BundleValueError, exc:
152 > raise util.Abort('missing support for %s' % exc)
152 > raise util.Abort('missing support for %s' % exc)
153 > except error.PushRaced, exc:
153 > except error.PushRaced, exc:
154 > raise util.Abort('push race: %s' % exc)
154 > raise util.Abort('push race: %s' % exc)
155 > finally:
155 > finally:
156 > if tr is not None:
156 > if tr is not None:
157 > tr.release()
157 > tr.release()
158 > lock.release()
158 > lock.release()
159 > remains = sys.stdin.read()
159 > remains = sys.stdin.read()
160 > ui.write('%i unread bytes\n' % len(remains))
160 > ui.write('%i unread bytes\n' % len(remains))
161 > if op.records['song']:
161 > if op.records['song']:
162 > totalverses = sum(r['verses'] for r in op.records['song'])
162 > totalverses = sum(r['verses'] for r in op.records['song'])
163 > ui.write('%i total verses sung\n' % totalverses)
163 > ui.write('%i total verses sung\n' % totalverses)
164 > for rec in op.records['changegroup']:
164 > for rec in op.records['changegroup']:
165 > ui.write('addchangegroup return: %i\n' % rec['return'])
165 > ui.write('addchangegroup return: %i\n' % rec['return'])
166 > if op.reply is not None and replypath is not None:
166 > if op.reply is not None and replypath is not None:
167 > file = open(replypath, 'wb')
167 > file = open(replypath, 'wb')
168 > for chunk in op.reply.getchunks():
168 > for chunk in op.reply.getchunks():
169 > file.write(chunk)
169 > file.write(chunk)
170 >
170 >
171 > @command('statbundle2', [], '')
171 > @command('statbundle2', [], '')
172 > def cmdstatbundle2(ui, repo):
172 > def cmdstatbundle2(ui, repo):
173 > """print statistic on the bundle2 container read from stdin"""
173 > """print statistic on the bundle2 container read from stdin"""
174 > unbundler = bundle2.unbundle20(ui, sys.stdin)
174 > unbundler = bundle2.unbundle20(ui, sys.stdin)
175 > try:
175 > try:
176 > params = unbundler.params
176 > params = unbundler.params
177 > except error.BundleValueError, exc:
177 > except error.BundleValueError, exc:
178 > raise util.Abort('unknown parameters: %s' % exc)
178 > raise util.Abort('unknown parameters: %s' % exc)
179 > ui.write('options count: %i\n' % len(params))
179 > ui.write('options count: %i\n' % len(params))
180 > for key in sorted(params):
180 > for key in sorted(params):
181 > ui.write('- %s\n' % key)
181 > ui.write('- %s\n' % key)
182 > value = params[key]
182 > value = params[key]
183 > if value is not None:
183 > if value is not None:
184 > ui.write(' %s\n' % value)
184 > ui.write(' %s\n' % value)
185 > count = 0
185 > count = 0
186 > for p in unbundler.iterparts():
186 > for p in unbundler.iterparts():
187 > count += 1
187 > count += 1
188 > ui.write(' :%s:\n' % p.type)
188 > ui.write(' :%s:\n' % p.type)
189 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
189 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
190 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
190 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
191 > ui.write(' payload: %i bytes\n' % len(p.read()))
191 > ui.write(' payload: %i bytes\n' % len(p.read()))
192 > ui.write('parts count: %i\n' % count)
192 > ui.write('parts count: %i\n' % count)
193 > EOF
193 > EOF
194 $ cat >> $HGRCPATH << EOF
194 $ cat >> $HGRCPATH << EOF
195 > [extensions]
195 > [extensions]
196 > bundle2=$TESTTMP/bundle2.py
196 > bundle2=$TESTTMP/bundle2.py
197 > [experimental]
197 > [experimental]
198 > bundle2-exp=True
198 > bundle2-exp=True
199 > [ui]
199 > [ui]
200 > ssh=python "$TESTDIR/dummyssh"
200 > ssh=python "$TESTDIR/dummyssh"
201 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
201 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
202 > [web]
202 > [web]
203 > push_ssl = false
203 > push_ssl = false
204 > allow_push = *
204 > allow_push = *
205 > [phases]
205 > [phases]
206 > publish=False
206 > publish=False
207 > EOF
207 > EOF
208
208
209 The extension requires a repo (currently unused)
209 The extension requires a repo (currently unused)
210
210
211 $ hg init main
211 $ hg init main
212 $ cd main
212 $ cd main
213 $ touch a
213 $ touch a
214 $ hg add a
214 $ hg add a
215 $ hg commit -m 'a'
215 $ hg commit -m 'a'
216
216
217
217
218 Empty bundle
218 Empty bundle
219 =================
219 =================
220
220
221 - no option
221 - no option
222 - no parts
222 - no parts
223
223
224 Test bundling
224 Test bundling
225
225
226 $ hg bundle2
226 $ hg bundle2
227 HG2X\x00\x00\x00\x00 (no-eol) (esc)
227 HG2X\x00\x00\x00\x00 (no-eol) (esc)
228
228
229 Test unbundling
229 Test unbundling
230
230
231 $ hg bundle2 | hg statbundle2
231 $ hg bundle2 | hg statbundle2
232 options count: 0
232 options count: 0
233 parts count: 0
233 parts count: 0
234
234
235 Test that old-style bundles are detected and refused
235 Test that old-style bundles are detected and refused
236
236
237 $ hg bundle --all ../bundle.hg
237 $ hg bundle --all ../bundle.hg
238 1 changesets found
238 1 changesets found
239 $ hg statbundle2 < ../bundle.hg
239 $ hg statbundle2 < ../bundle.hg
240 abort: unknown bundle version 10
240 abort: unknown bundle version 10
241 [255]
241 [255]
242
242
243 Test parameters
243 Test parameters
244 =================
244 =================
245
245
246 - some options
246 - some options
247 - no parts
247 - no parts
248
248
249 advisory parameters, no value
249 advisory parameters, no value
250 -------------------------------
250 -------------------------------
251
251
252 Simplest possible parameters form
252 Simplest possible parameters form
253
253
254 Test generation simple option
254 Test generation simple option
255
255
256 $ hg bundle2 --param 'caution'
256 $ hg bundle2 --param 'caution'
257 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
257 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
258
258
259 Test unbundling
259 Test unbundling
260
260
261 $ hg bundle2 --param 'caution' | hg statbundle2
261 $ hg bundle2 --param 'caution' | hg statbundle2
262 options count: 1
262 options count: 1
263 - caution
263 - caution
264 parts count: 0
264 parts count: 0
265
265
266 Test generation multiple option
266 Test generation multiple option
267
267
268 $ hg bundle2 --param 'caution' --param 'meal'
268 $ hg bundle2 --param 'caution' --param 'meal'
269 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
269 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
270
270
271 Test unbundling
271 Test unbundling
272
272
273 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
273 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
274 options count: 2
274 options count: 2
275 - caution
275 - caution
276 - meal
276 - meal
277 parts count: 0
277 parts count: 0
278
278
279 advisory parameters, with value
279 advisory parameters, with value
280 -------------------------------
280 -------------------------------
281
281
282 Test generation
282 Test generation
283
283
284 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
284 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
285 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
285 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
286
286
287 Test unbundling
287 Test unbundling
288
288
289 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
289 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
290 options count: 3
290 options count: 3
291 - caution
291 - caution
292 - elephants
292 - elephants
293 - meal
293 - meal
294 vegan
294 vegan
295 parts count: 0
295 parts count: 0
296
296
297 parameter with special char in value
297 parameter with special char in value
298 ---------------------------------------------------
298 ---------------------------------------------------
299
299
300 Test generation
300 Test generation
301
301
302 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
302 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
303 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
303 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
304
304
305 Test unbundling
305 Test unbundling
306
306
307 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
307 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
308 options count: 2
308 options count: 2
309 - e|! 7/
309 - e|! 7/
310 babar%#==tutu
310 babar%#==tutu
311 - simple
311 - simple
312 parts count: 0
312 parts count: 0
313
313
314 Test unknown mandatory option
314 Test unknown mandatory option
315 ---------------------------------------------------
315 ---------------------------------------------------
316
316
317 $ hg bundle2 --param 'Gravity' | hg statbundle2
317 $ hg bundle2 --param 'Gravity' | hg statbundle2
318 abort: unknown parameters: Stream Parameter - Gravity
318 abort: unknown parameters: Stream Parameter - Gravity
319 [255]
319 [255]
320
320
321 Test debug output
321 Test debug output
322 ---------------------------------------------------
322 ---------------------------------------------------
323
323
324 bundling debug
324 bundling debug
325
325
326 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
326 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
327 start emission of HG2X stream
327 start emission of HG2X stream
328 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
328 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
329 start of parts
329 start of parts
330 end of bundle
330 end of bundle
331
331
332 file content is ok
332 file content is ok
333
333
334 $ cat ../out.hg2
334 $ cat ../out.hg2
335 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
335 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
336
336
337 unbundling debug
337 unbundling debug
338
338
339 $ hg statbundle2 --debug < ../out.hg2
339 $ hg statbundle2 --debug < ../out.hg2
340 start processing of HG2X stream
340 start processing of HG2X stream
341 reading bundle2 stream parameters
341 reading bundle2 stream parameters
342 ignoring unknown parameter 'e|! 7/'
342 ignoring unknown parameter 'e|! 7/'
343 ignoring unknown parameter 'simple'
343 ignoring unknown parameter 'simple'
344 options count: 2
344 options count: 2
345 - e|! 7/
345 - e|! 7/
346 babar%#==tutu
346 babar%#==tutu
347 - simple
347 - simple
348 start extraction of bundle2 parts
348 start extraction of bundle2 parts
349 part header size: 0
349 part header size: 0
350 end of bundle2 stream
350 end of bundle2 stream
351 parts count: 0
351 parts count: 0
352
352
353
353
354 Test buggy input
354 Test buggy input
355 ---------------------------------------------------
355 ---------------------------------------------------
356
356
357 empty parameter name
357 empty parameter name
358
358
359 $ hg bundle2 --param '' --quiet
359 $ hg bundle2 --param '' --quiet
360 abort: empty parameter name
360 abort: empty parameter name
361 [255]
361 [255]
362
362
363 bad parameter name
363 bad parameter name
364
364
365 $ hg bundle2 --param 42babar
365 $ hg bundle2 --param 42babar
366 abort: non letter first character: '42babar'
366 abort: non letter first character: '42babar'
367 [255]
367 [255]
368
368
369
369
370 Test part
370 Test part
371 =================
371 =================
372
372
373 $ hg bundle2 --parts ../parts.hg2 --debug
373 $ hg bundle2 --parts ../parts.hg2 --debug
374 start emission of HG2X stream
374 start emission of HG2X stream
375 bundle parameter:
375 bundle parameter:
376 start of parts
376 start of parts
377 bundle part: "test:empty"
377 bundle part: "test:empty"
378 bundle part: "test:empty"
378 bundle part: "test:empty"
379 bundle part: "test:song"
379 bundle part: "test:song"
380 bundle part: "test:debugreply"
380 bundle part: "test:debugreply"
381 bundle part: "test:math"
381 bundle part: "test:math"
382 bundle part: "test:song"
382 bundle part: "test:song"
383 bundle part: "test:ping"
383 bundle part: "test:ping"
384 end of bundle
384 end of bundle
385
385
386 $ cat ../parts.hg2
386 $ cat ../parts.hg2
387 HG2X\x00\x00\x00\x11 (esc)
387 HG2X\x00\x00\x00\x11 (esc)
388 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
388 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
389 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
389 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
390 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
390 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
391 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x1d test:song\x00\x00\x00\x05\x01\x00\x0b\x00randomparam\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
391 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x1d test:song\x00\x00\x00\x05\x01\x00\x0b\x00randomparam\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
392
392
393
393
394 $ hg statbundle2 < ../parts.hg2
394 $ hg statbundle2 < ../parts.hg2
395 options count: 0
395 options count: 0
396 :test:empty:
396 :test:empty:
397 mandatory: 0
397 mandatory: 0
398 advisory: 0
398 advisory: 0
399 payload: 0 bytes
399 payload: 0 bytes
400 :test:empty:
400 :test:empty:
401 mandatory: 0
401 mandatory: 0
402 advisory: 0
402 advisory: 0
403 payload: 0 bytes
403 payload: 0 bytes
404 :test:song:
404 :test:song:
405 mandatory: 0
405 mandatory: 0
406 advisory: 0
406 advisory: 0
407 payload: 178 bytes
407 payload: 178 bytes
408 :test:debugreply:
408 :test:debugreply:
409 mandatory: 0
409 mandatory: 0
410 advisory: 0
410 advisory: 0
411 payload: 0 bytes
411 payload: 0 bytes
412 :test:math:
412 :test:math:
413 mandatory: 2
413 mandatory: 2
414 advisory: 1
414 advisory: 1
415 payload: 2 bytes
415 payload: 2 bytes
416 :test:song:
416 :test:song:
417 mandatory: 1
417 mandatory: 1
418 advisory: 0
418 advisory: 0
419 payload: 0 bytes
419 payload: 0 bytes
420 :test:ping:
420 :test:ping:
421 mandatory: 0
421 mandatory: 0
422 advisory: 0
422 advisory: 0
423 payload: 0 bytes
423 payload: 0 bytes
424 parts count: 7
424 parts count: 7
425
425
426 $ hg statbundle2 --debug < ../parts.hg2
426 $ hg statbundle2 --debug < ../parts.hg2
427 start processing of HG2X stream
427 start processing of HG2X stream
428 reading bundle2 stream parameters
428 reading bundle2 stream parameters
429 options count: 0
429 options count: 0
430 start extraction of bundle2 parts
430 start extraction of bundle2 parts
431 part header size: 17
431 part header size: 17
432 part type: "test:empty"
432 part type: "test:empty"
433 part id: "0"
433 part id: "0"
434 part parameters: 0
434 part parameters: 0
435 :test:empty:
435 :test:empty:
436 mandatory: 0
436 mandatory: 0
437 advisory: 0
437 advisory: 0
438 payload chunk size: 0
438 payload chunk size: 0
439 payload: 0 bytes
439 payload: 0 bytes
440 part header size: 17
440 part header size: 17
441 part type: "test:empty"
441 part type: "test:empty"
442 part id: "1"
442 part id: "1"
443 part parameters: 0
443 part parameters: 0
444 :test:empty:
444 :test:empty:
445 mandatory: 0
445 mandatory: 0
446 advisory: 0
446 advisory: 0
447 payload chunk size: 0
447 payload chunk size: 0
448 payload: 0 bytes
448 payload: 0 bytes
449 part header size: 16
449 part header size: 16
450 part type: "test:song"
450 part type: "test:song"
451 part id: "2"
451 part id: "2"
452 part parameters: 0
452 part parameters: 0
453 :test:song:
453 :test:song:
454 mandatory: 0
454 mandatory: 0
455 advisory: 0
455 advisory: 0
456 payload chunk size: 178
456 payload chunk size: 178
457 payload chunk size: 0
457 payload chunk size: 0
458 payload: 178 bytes
458 payload: 178 bytes
459 part header size: 22
459 part header size: 22
460 part type: "test:debugreply"
460 part type: "test:debugreply"
461 part id: "3"
461 part id: "3"
462 part parameters: 0
462 part parameters: 0
463 :test:debugreply:
463 :test:debugreply:
464 mandatory: 0
464 mandatory: 0
465 advisory: 0
465 advisory: 0
466 payload chunk size: 0
466 payload chunk size: 0
467 payload: 0 bytes
467 payload: 0 bytes
468 part header size: 43
468 part header size: 43
469 part type: "test:math"
469 part type: "test:math"
470 part id: "4"
470 part id: "4"
471 part parameters: 3
471 part parameters: 3
472 :test:math:
472 :test:math:
473 mandatory: 2
473 mandatory: 2
474 advisory: 1
474 advisory: 1
475 payload chunk size: 2
475 payload chunk size: 2
476 payload chunk size: 0
476 payload chunk size: 0
477 payload: 2 bytes
477 payload: 2 bytes
478 part header size: 29
478 part header size: 29
479 part type: "test:song"
479 part type: "test:song"
480 part id: "5"
480 part id: "5"
481 part parameters: 1
481 part parameters: 1
482 :test:song:
482 :test:song:
483 mandatory: 1
483 mandatory: 1
484 advisory: 0
484 advisory: 0
485 payload chunk size: 0
485 payload chunk size: 0
486 payload: 0 bytes
486 payload: 0 bytes
487 part header size: 16
487 part header size: 16
488 part type: "test:ping"
488 part type: "test:ping"
489 part id: "6"
489 part id: "6"
490 part parameters: 0
490 part parameters: 0
491 :test:ping:
491 :test:ping:
492 mandatory: 0
492 mandatory: 0
493 advisory: 0
493 advisory: 0
494 payload chunk size: 0
494 payload chunk size: 0
495 payload: 0 bytes
495 payload: 0 bytes
496 part header size: 0
496 part header size: 0
497 end of bundle2 stream
497 end of bundle2 stream
498 parts count: 7
498 parts count: 7
499
499
500 Test actual unbundling of test part
500 Test actual unbundling of test part
501 =======================================
501 =======================================
502
502
503 Process the bundle
503 Process the bundle
504
504
505 $ hg unbundle2 --debug < ../parts.hg2
505 $ hg unbundle2 --debug < ../parts.hg2
506 start processing of HG2X stream
506 start processing of HG2X stream
507 reading bundle2 stream parameters
507 reading bundle2 stream parameters
508 start extraction of bundle2 parts
508 start extraction of bundle2 parts
509 part header size: 17
509 part header size: 17
510 part type: "test:empty"
510 part type: "test:empty"
511 part id: "0"
511 part id: "0"
512 part parameters: 0
512 part parameters: 0
513 ignoring unsupported advisory part test:empty
513 ignoring unsupported advisory part test:empty
514 payload chunk size: 0
514 payload chunk size: 0
515 part header size: 17
515 part header size: 17
516 part type: "test:empty"
516 part type: "test:empty"
517 part id: "1"
517 part id: "1"
518 part parameters: 0
518 part parameters: 0
519 ignoring unsupported advisory part test:empty
519 ignoring unsupported advisory part test:empty
520 payload chunk size: 0
520 payload chunk size: 0
521 part header size: 16
521 part header size: 16
522 part type: "test:song"
522 part type: "test:song"
523 part id: "2"
523 part id: "2"
524 part parameters: 0
524 part parameters: 0
525 found a handler for part 'test:song'
525 found a handler for part 'test:song'
526 The choir starts singing:
526 The choir starts singing:
527 payload chunk size: 178
527 payload chunk size: 178
528 payload chunk size: 0
528 payload chunk size: 0
529 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
529 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
530 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
530 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
531 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
531 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
532 part header size: 22
532 part header size: 22
533 part type: "test:debugreply"
533 part type: "test:debugreply"
534 part id: "3"
534 part id: "3"
535 part parameters: 0
535 part parameters: 0
536 found a handler for part 'test:debugreply'
536 found a handler for part 'test:debugreply'
537 debugreply: no reply
537 debugreply: no reply
538 payload chunk size: 0
538 payload chunk size: 0
539 part header size: 43
539 part header size: 43
540 part type: "test:math"
540 part type: "test:math"
541 part id: "4"
541 part id: "4"
542 part parameters: 3
542 part parameters: 3
543 ignoring unsupported advisory part test:math
543 ignoring unsupported advisory part test:math
544 payload chunk size: 2
544 payload chunk size: 2
545 payload chunk size: 0
545 payload chunk size: 0
546 part header size: 29
546 part header size: 29
547 part type: "test:song"
547 part type: "test:song"
548 part id: "5"
548 part id: "5"
549 part parameters: 1
549 part parameters: 1
550 found a handler for part 'test:song'
550 found a handler for part 'test:song'
551 ignoring unsupported advisory part test:song - randomparam
551 ignoring unsupported advisory part test:song - randomparam
552 payload chunk size: 0
552 payload chunk size: 0
553 part header size: 16
553 part header size: 16
554 part type: "test:ping"
554 part type: "test:ping"
555 part id: "6"
555 part id: "6"
556 part parameters: 0
556 part parameters: 0
557 found a handler for part 'test:ping'
557 found a handler for part 'test:ping'
558 received ping request (id 6)
558 received ping request (id 6)
559 payload chunk size: 0
559 payload chunk size: 0
560 part header size: 0
560 part header size: 0
561 end of bundle2 stream
561 end of bundle2 stream
562 0 unread bytes
562 0 unread bytes
563 3 total verses sung
563 3 total verses sung
564
564
565 Unbundle with an unknown mandatory part
565 Unbundle with an unknown mandatory part
566 (should abort)
566 (should abort)
567
567
568 $ hg bundle2 --parts --unknown ../unknown.hg2
568 $ hg bundle2 --parts --unknown ../unknown.hg2
569
569
570 $ hg unbundle2 < ../unknown.hg2
570 $ hg unbundle2 < ../unknown.hg2
571 The choir starts singing:
571 The choir starts singing:
572 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
572 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
573 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
573 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
574 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
574 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
575 debugreply: no reply
575 debugreply: no reply
576 0 unread bytes
576 0 unread bytes
577 abort: missing support for test:unknown
577 abort: missing support for test:unknown
578 [255]
578 [255]
579
579
580 Unbundle with an unknown mandatory part parameter
580 Unbundle with an unknown mandatory part parameter
581 (should abort)
581 (should abort)
582
582
583 $ hg bundle2 --unknownparams ../unknown.hg2
583 $ hg bundle2 --unknownparams ../unknown.hg2
584
584
585 $ hg unbundle2 < ../unknown.hg2
585 $ hg unbundle2 < ../unknown.hg2
586 0 unread bytes
586 0 unread bytes
587 abort: missing support for test:song - randomparams
587 abort: missing support for test:song - randomparams
588 [255]
588 [255]
589
589
590 unbundle with a reply
590 unbundle with a reply
591
591
592 $ hg bundle2 --parts --reply ../parts-reply.hg2
592 $ hg bundle2 --parts --reply ../parts-reply.hg2
593 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
593 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
594 0 unread bytes
594 0 unread bytes
595 3 total verses sung
595 3 total verses sung
596
596
597 The reply is a bundle
597 The reply is a bundle
598
598
599 $ cat ../reply.hg2
599 $ cat ../reply.hg2
600 HG2X\x00\x00\x00\x1f (esc)
600 HG2X\x00\x00\x00\x1f (esc)
601 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
601 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
602 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
602 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
603 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
603 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
604 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
604 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
605 \x00\x00\x00\x00\x00\x1f (esc)
605 \x00\x00\x00\x00\x00\x1f (esc)
606 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
606 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
607 debugreply: 'city=!'
607 debugreply: 'city=!'
608 debugreply: 'celeste,ville'
608 debugreply: 'celeste,ville'
609 debugreply: 'elephants'
609 debugreply: 'elephants'
610 debugreply: 'babar'
610 debugreply: 'babar'
611 debugreply: 'celeste'
611 debugreply: 'celeste'
612 debugreply: 'ping-pong'
612 debugreply: 'ping-pong'
613 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x1f (esc)
613 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to7\x00\x00\x00\x00\x00\x1f (esc)
614 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
614 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to7\x00\x00\x00=received ping request (id 7) (esc)
615 replying to ping request (id 7)
615 replying to ping request (id 7)
616 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
616 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
617
617
618 The reply is valid
618 The reply is valid
619
619
620 $ hg statbundle2 < ../reply.hg2
620 $ hg statbundle2 < ../reply.hg2
621 options count: 0
621 options count: 0
622 :b2x:output:
622 :b2x:output:
623 mandatory: 0
623 mandatory: 0
624 advisory: 1
624 advisory: 1
625 payload: 217 bytes
625 payload: 217 bytes
626 :b2x:output:
626 :b2x:output:
627 mandatory: 0
627 mandatory: 0
628 advisory: 1
628 advisory: 1
629 payload: 201 bytes
629 payload: 201 bytes
630 :test:pong:
630 :test:pong:
631 mandatory: 1
631 mandatory: 1
632 advisory: 0
632 advisory: 0
633 payload: 0 bytes
633 payload: 0 bytes
634 :b2x:output:
634 :b2x:output:
635 mandatory: 0
635 mandatory: 0
636 advisory: 1
636 advisory: 1
637 payload: 61 bytes
637 payload: 61 bytes
638 parts count: 4
638 parts count: 4
639
639
640 Unbundle the reply to get the output:
640 Unbundle the reply to get the output:
641
641
642 $ hg unbundle2 < ../reply.hg2
642 $ hg unbundle2 < ../reply.hg2
643 remote: The choir starts singing:
643 remote: The choir starts singing:
644 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
644 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
645 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
645 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
646 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
646 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
647 remote: debugreply: capabilities:
647 remote: debugreply: capabilities:
648 remote: debugreply: 'city=!'
648 remote: debugreply: 'city=!'
649 remote: debugreply: 'celeste,ville'
649 remote: debugreply: 'celeste,ville'
650 remote: debugreply: 'elephants'
650 remote: debugreply: 'elephants'
651 remote: debugreply: 'babar'
651 remote: debugreply: 'babar'
652 remote: debugreply: 'celeste'
652 remote: debugreply: 'celeste'
653 remote: debugreply: 'ping-pong'
653 remote: debugreply: 'ping-pong'
654 remote: received ping request (id 7)
654 remote: received ping request (id 7)
655 remote: replying to ping request (id 7)
655 remote: replying to ping request (id 7)
656 0 unread bytes
656 0 unread bytes
657
657
658 Test push race detection
658 Test push race detection
659
659
660 $ hg bundle2 --pushrace ../part-race.hg2
660 $ hg bundle2 --pushrace ../part-race.hg2
661
661
662 $ hg unbundle2 < ../part-race.hg2
662 $ hg unbundle2 < ../part-race.hg2
663 0 unread bytes
663 0 unread bytes
664 abort: push race: repository changed while pushing - please try again
664 abort: push race: repository changed while pushing - please try again
665 [255]
665 [255]
666
666
667 Support for changegroup
667 Support for changegroup
668 ===================================
668 ===================================
669
669
670 $ hg unbundle $TESTDIR/bundles/rebase.hg
670 $ hg unbundle $TESTDIR/bundles/rebase.hg
671 adding changesets
671 adding changesets
672 adding manifests
672 adding manifests
673 adding file changes
673 adding file changes
674 added 8 changesets with 7 changes to 7 files (+3 heads)
674 added 8 changesets with 7 changes to 7 files (+3 heads)
675 (run 'hg heads' to see heads, 'hg merge' to merge)
675 (run 'hg heads' to see heads, 'hg merge' to merge)
676
676
677 $ hg log -G
677 $ hg log -G
678 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
678 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
679 |
679 |
680 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
680 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
681 |/|
681 |/|
682 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
682 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
683 | |
683 | |
684 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
684 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
685 |/
685 |/
686 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
686 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
687 | |
687 | |
688 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
688 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
689 | |
689 | |
690 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
690 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
691 |/
691 |/
692 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
692 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
693
693
694 @ 0:3903775176ed draft test a
694 @ 0:3903775176ed draft test a
695
695
696
696
697 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
697 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
698 4 changesets found
698 4 changesets found
699 list of changesets:
699 list of changesets:
700 32af7686d403cf45b5d95f2d70cebea587ac806a
700 32af7686d403cf45b5d95f2d70cebea587ac806a
701 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
701 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
702 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
702 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
703 02de42196ebee42ef284b6780a87cdc96e8eaab6
703 02de42196ebee42ef284b6780a87cdc96e8eaab6
704 start emission of HG2X stream
704 start emission of HG2X stream
705 bundle parameter:
705 bundle parameter:
706 start of parts
706 start of parts
707 bundle part: "b2x:changegroup"
707 bundle part: "b2x:changegroup"
708 bundling: 1/4 changesets (25.00%)
708 bundling: 1/4 changesets (25.00%)
709 bundling: 2/4 changesets (50.00%)
709 bundling: 2/4 changesets (50.00%)
710 bundling: 3/4 changesets (75.00%)
710 bundling: 3/4 changesets (75.00%)
711 bundling: 4/4 changesets (100.00%)
711 bundling: 4/4 changesets (100.00%)
712 bundling: 1/4 manifests (25.00%)
712 bundling: 1/4 manifests (25.00%)
713 bundling: 2/4 manifests (50.00%)
713 bundling: 2/4 manifests (50.00%)
714 bundling: 3/4 manifests (75.00%)
714 bundling: 3/4 manifests (75.00%)
715 bundling: 4/4 manifests (100.00%)
715 bundling: 4/4 manifests (100.00%)
716 bundling: D 1/3 files (33.33%)
716 bundling: D 1/3 files (33.33%)
717 bundling: E 2/3 files (66.67%)
717 bundling: E 2/3 files (66.67%)
718 bundling: H 3/3 files (100.00%)
718 bundling: H 3/3 files (100.00%)
719 end of bundle
719 end of bundle
720
720
721 $ cat ../rev.hg2
721 $ cat ../rev.hg2
722 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
722 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
723 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
723 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
724 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
724 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
725 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
725 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
726 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
726 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
727 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
727 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
728 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
728 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
729 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
729 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
730 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
730 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
731 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
731 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
732 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
732 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
733 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
733 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
734 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
734 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
735 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
735 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
736 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
736 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
737 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
737 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
738 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
738 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
739 l\r (no-eol) (esc)
739 l\r (no-eol) (esc)
740 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
740 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
741 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
741 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
742 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
742 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
743 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
743 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
744
744
745 $ hg unbundle2 < ../rev.hg2
745 $ hg unbundle2 < ../rev.hg2
746 adding changesets
746 adding changesets
747 adding manifests
747 adding manifests
748 adding file changes
748 adding file changes
749 added 0 changesets with 0 changes to 3 files
749 added 0 changesets with 0 changes to 3 files
750 0 unread bytes
750 0 unread bytes
751 addchangegroup return: 1
751 addchangegroup return: 1
752
752
753 with reply
753 with reply
754
754
755 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
755 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
756 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
756 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
757 0 unread bytes
757 0 unread bytes
758 addchangegroup return: 1
758 addchangegroup return: 1
759
759
760 $ cat ../rev-reply.hg2
760 $ cat ../rev-reply.hg2
761 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
761 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
762 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
762 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
763 adding manifests
763 adding manifests
764 adding file changes
764 adding file changes
765 added 0 changesets with 0 changes to 3 files
765 added 0 changesets with 0 changes to 3 files
766 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
766 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
767
767
768 $ cd ..
768 $ cd ..
769
769
770 Real world exchange
770 Real world exchange
771 =====================
771 =====================
772
772
773 Add more obsolescence information
773 Add more obsolescence information
774
774
775 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
775 $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc`
776 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
776 $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c`
777
777
778 clone --pull
778 clone --pull
779
779
780 $ hg -R main phase --public cd010b8cd998
780 $ hg -R main phase --public cd010b8cd998
781 $ hg clone main other --pull --rev 9520eea781bc
781 $ hg clone main other --pull --rev 9520eea781bc
782 adding changesets
782 adding changesets
783 adding manifests
783 adding manifests
784 adding file changes
784 adding file changes
785 added 2 changesets with 2 changes to 2 files
785 added 2 changesets with 2 changes to 2 files
786 updating to branch default
786 updating to branch default
787 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
787 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
788 $ hg -R other log -G
788 $ hg -R other log -G
789 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
789 @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
790 |
790 |
791 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
791 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
792
792
793 $ hg -R other debugobsolete
793 $ hg -R other debugobsolete
794 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
794 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
795 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
795 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
796
796
797 pull
797 pull
798
798
799 $ hg -R main phase --public 9520eea781bc
799 $ hg -R main phase --public 9520eea781bc
800 $ hg -R other pull -r 24b6387c8c8c
800 $ hg -R other pull -r 24b6387c8c8c
801 pulling from $TESTTMP/main (glob)
801 pulling from $TESTTMP/main (glob)
802 searching for changes
802 searching for changes
803 adding changesets
803 adding changesets
804 adding manifests
804 adding manifests
805 adding file changes
805 adding file changes
806 added 1 changesets with 1 changes to 1 files (+1 heads)
806 added 1 changesets with 1 changes to 1 files (+1 heads)
807 (run 'hg heads' to see heads, 'hg merge' to merge)
807 (run 'hg heads' to see heads, 'hg merge' to merge)
808 $ hg -R other log -G
808 $ hg -R other log -G
809 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
809 o 2:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
810 |
810 |
811 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
811 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
812 |/
812 |/
813 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
813 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
814
814
815 $ hg -R other debugobsolete
815 $ hg -R other debugobsolete
816 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
816 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
817 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
817 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
818
818
819 pull empty (with phase movement)
819 pull empty (with phase movement)
820
820
821 $ hg -R main phase --public 24b6387c8c8c
821 $ hg -R main phase --public 24b6387c8c8c
822 $ hg -R other pull -r 24b6387c8c8c
822 $ hg -R other pull -r 24b6387c8c8c
823 pulling from $TESTTMP/main (glob)
823 pulling from $TESTTMP/main (glob)
824 no changes found
824 no changes found
825 $ hg -R other log -G
825 $ hg -R other log -G
826 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
826 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
827 |
827 |
828 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
828 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
829 |/
829 |/
830 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
830 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
831
831
832 $ hg -R other debugobsolete
832 $ hg -R other debugobsolete
833 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
833 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
834 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
834 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
835
835
836 pull empty
836 pull empty
837
837
838 $ hg -R other pull -r 24b6387c8c8c
838 $ hg -R other pull -r 24b6387c8c8c
839 pulling from $TESTTMP/main (glob)
839 pulling from $TESTTMP/main (glob)
840 no changes found
840 no changes found
841 $ hg -R other log -G
841 $ hg -R other log -G
842 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
842 o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
843 |
843 |
844 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
844 | @ 1:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
845 |/
845 |/
846 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
846 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
847
847
848 $ hg -R other debugobsolete
848 $ hg -R other debugobsolete
849 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
849 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
850 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
850 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
851
851
852 add extra data to test their exchange during push
852 add extra data to test their exchange during push
853
853
854 $ hg -R main bookmark --rev eea13746799a book_eea1
854 $ hg -R main bookmark --rev eea13746799a book_eea1
855 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
855 $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a`
856 $ hg -R main bookmark --rev 02de42196ebe book_02de
856 $ hg -R main bookmark --rev 02de42196ebe book_02de
857 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
857 $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe`
858 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
858 $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc
859 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
859 $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16`
860 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
860 $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd
861 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
861 $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8`
862 $ hg -R main bookmark --rev 32af7686d403 book_32af
862 $ hg -R main bookmark --rev 32af7686d403 book_32af
863 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
863 $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403`
864
864
865 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
865 $ hg -R other bookmark --rev cd010b8cd998 book_eea1
866 $ hg -R other bookmark --rev cd010b8cd998 book_02de
866 $ hg -R other bookmark --rev cd010b8cd998 book_02de
867 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
867 $ hg -R other bookmark --rev cd010b8cd998 book_42cc
868 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
868 $ hg -R other bookmark --rev cd010b8cd998 book_5fdd
869 $ hg -R other bookmark --rev cd010b8cd998 book_32af
869 $ hg -R other bookmark --rev cd010b8cd998 book_32af
870
870
871 $ hg -R main phase --public eea13746799a
871 $ hg -R main phase --public eea13746799a
872
872
873 push
873 push
874 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
874 $ hg -R main push other --rev eea13746799a --bookmark book_eea1
875 pushing to other
875 pushing to other
876 searching for changes
876 searching for changes
877 remote: adding changesets
877 remote: adding changesets
878 remote: adding manifests
878 remote: adding manifests
879 remote: adding file changes
879 remote: adding file changes
880 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
880 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
881 remote: 5 new obsolescence markers
881 updating bookmark book_eea1
882 updating bookmark book_eea1
882 exporting bookmark book_eea1
883 exporting bookmark book_eea1
883 $ hg -R other log -G
884 $ hg -R other log -G
884 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
885 o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
885 |\
886 |\
886 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
887 | o 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
887 | |
888 | |
888 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
889 @ | 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
889 |/
890 |/
890 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
891 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de book_32af book_42cc book_5fdd A
891
892
892 $ hg -R other debugobsolete
893 $ hg -R other debugobsolete
893 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
894 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
894 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
895 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
895 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
896 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
896 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
897 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
897 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
898 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
898 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
899 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
899 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
900 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
900
901
901 pull over ssh
902 pull over ssh
902
903
903 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
904 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de
904 pulling from ssh://user@dummy/main
905 pulling from ssh://user@dummy/main
905 searching for changes
906 searching for changes
906 adding changesets
907 adding changesets
907 adding manifests
908 adding manifests
908 adding file changes
909 adding file changes
909 added 1 changesets with 1 changes to 1 files (+1 heads)
910 added 1 changesets with 1 changes to 1 files (+1 heads)
910 updating bookmark book_02de
911 updating bookmark book_02de
911 (run 'hg heads' to see heads, 'hg merge' to merge)
912 (run 'hg heads' to see heads, 'hg merge' to merge)
912 importing bookmark book_02de
913 importing bookmark book_02de
913 $ hg -R other debugobsolete
914 $ hg -R other debugobsolete
914 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
915 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
915 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
916 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
916 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
917 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
917 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
918 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
918 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
919 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
919 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
920 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
920 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
921 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
921
922
922 pull over http
923 pull over http
923
924
924 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
925 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
925 $ cat main.pid >> $DAEMON_PIDS
926 $ cat main.pid >> $DAEMON_PIDS
926
927
927 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
928 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc
928 pulling from http://localhost:$HGPORT/
929 pulling from http://localhost:$HGPORT/
929 searching for changes
930 searching for changes
930 adding changesets
931 adding changesets
931 adding manifests
932 adding manifests
932 adding file changes
933 adding file changes
933 added 1 changesets with 1 changes to 1 files (+1 heads)
934 added 1 changesets with 1 changes to 1 files (+1 heads)
934 updating bookmark book_42cc
935 updating bookmark book_42cc
935 (run 'hg heads .' to see heads, 'hg merge' to merge)
936 (run 'hg heads .' to see heads, 'hg merge' to merge)
936 importing bookmark book_42cc
937 importing bookmark book_42cc
937 $ cat main-error.log
938 $ cat main-error.log
938 $ hg -R other debugobsolete
939 $ hg -R other debugobsolete
939 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
940 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
940 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
941 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
941 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
942 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
942 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
943 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
943 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
944 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
944 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
945 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
945 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
946 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
946
947
947 push over ssh
948 push over ssh
948
949
949 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
950 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd
950 pushing to ssh://user@dummy/other
951 pushing to ssh://user@dummy/other
951 searching for changes
952 searching for changes
952 remote: adding changesets
953 remote: adding changesets
953 remote: adding manifests
954 remote: adding manifests
954 remote: adding file changes
955 remote: adding file changes
955 remote: added 1 changesets with 1 changes to 1 files
956 remote: added 1 changesets with 1 changes to 1 files
956 updating bookmark book_5fdd
957 updating bookmark book_5fdd
957 exporting bookmark book_5fdd
958 exporting bookmark book_5fdd
958 $ hg -R other log -G
959 $ hg -R other log -G
959 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
960 o 6:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
960 |
961 |
961 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
962 o 5:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
962 |
963 |
963 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
964 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
964 | |
965 | |
965 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
966 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
966 | |/|
967 | |/|
967 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
968 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
968 |/ /
969 |/ /
969 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
970 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
970 |/
971 |/
971 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
972 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af A
972
973
973 $ hg -R other debugobsolete
974 $ hg -R other debugobsolete
974 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
975 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
975 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
976 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
976 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
977 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
977 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
978 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
978 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
979 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
979 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
980 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
980 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
981 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
981
982
982 push over http
983 push over http
983
984
984 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
985 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
985 $ cat other.pid >> $DAEMON_PIDS
986 $ cat other.pid >> $DAEMON_PIDS
986
987
987 $ hg -R main phase --public 32af7686d403
988 $ hg -R main phase --public 32af7686d403
988 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
989 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af
989 pushing to http://localhost:$HGPORT2/
990 pushing to http://localhost:$HGPORT2/
990 searching for changes
991 searching for changes
991 remote: adding changesets
992 remote: adding changesets
992 remote: adding manifests
993 remote: adding manifests
993 remote: adding file changes
994 remote: adding file changes
994 remote: added 1 changesets with 1 changes to 1 files
995 remote: added 1 changesets with 1 changes to 1 files
995 updating bookmark book_32af
996 updating bookmark book_32af
996 exporting bookmark book_32af
997 exporting bookmark book_32af
997 $ cat other-error.log
998 $ cat other-error.log
998
999
999 Check final content.
1000 Check final content.
1000
1001
1001 $ hg -R other log -G
1002 $ hg -R other log -G
1002 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
1003 o 7:32af7686d403 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_32af D
1003 |
1004 |
1004 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
1005 o 6:5fddd98957c8 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_5fdd C
1005 |
1006 |
1006 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
1007 o 5:42ccdea3bb16 public Nicolas Dumazet <nicdumz.commits@gmail.com> book_42cc B
1007 |
1008 |
1008 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
1009 | o 4:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> book_02de H
1009 | |
1010 | |
1010 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
1011 | | o 3:eea13746799a public Nicolas Dumazet <nicdumz.commits@gmail.com> book_eea1 G
1011 | |/|
1012 | |/|
1012 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
1013 | o | 2:24b6387c8c8c public Nicolas Dumazet <nicdumz.commits@gmail.com> F
1013 |/ /
1014 |/ /
1014 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
1015 | @ 1:9520eea781bc public Nicolas Dumazet <nicdumz.commits@gmail.com> E
1015 |/
1016 |/
1016 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
1017 o 0:cd010b8cd998 public Nicolas Dumazet <nicdumz.commits@gmail.com> A
1017
1018
1018 $ hg -R other debugobsolete
1019 $ hg -R other debugobsolete
1019 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1020 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1020 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1021 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1021 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1022 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1022 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1023 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1023 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1024 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1024 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1025 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1025 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1026 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1026
1027
1027 Error Handling
1028 Error Handling
1028 ==============
1029 ==============
1029
1030
1030 Check that errors are properly returned to the client during push.
1031 Check that errors are properly returned to the client during push.
1031
1032
1032 Setting up
1033 Setting up
1033
1034
1034 $ cat > failpush.py << EOF
1035 $ cat > failpush.py << EOF
1035 > """A small extension that makes push fails when using bundle2
1036 > """A small extension that makes push fails when using bundle2
1036 >
1037 >
1037 > used to test error handling in bundle2
1038 > used to test error handling in bundle2
1038 > """
1039 > """
1039 >
1040 >
1040 > from mercurial import util
1041 > from mercurial import util
1041 > from mercurial import bundle2
1042 > from mercurial import bundle2
1042 > from mercurial import exchange
1043 > from mercurial import exchange
1043 > from mercurial import extensions
1044 > from mercurial import extensions
1044 >
1045 >
1045 > def _pushbundle2failpart(pushop, bundler):
1046 > def _pushbundle2failpart(pushop, bundler):
1046 > reason = pushop.ui.config('failpush', 'reason', None)
1047 > reason = pushop.ui.config('failpush', 'reason', None)
1047 > part = None
1048 > part = None
1048 > if reason == 'abort':
1049 > if reason == 'abort':
1049 > bundler.newpart('test:abort')
1050 > bundler.newpart('test:abort')
1050 > if reason == 'unknown':
1051 > if reason == 'unknown':
1051 > bundler.newpart('TEST:UNKNOWN')
1052 > bundler.newpart('TEST:UNKNOWN')
1052 > if reason == 'race':
1053 > if reason == 'race':
1053 > # 20 Bytes of crap
1054 > # 20 Bytes of crap
1054 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
1055 > bundler.newpart('b2x:check:heads', data='01234567890123456789')
1055 >
1056 >
1056 > @bundle2.parthandler("test:abort")
1057 > @bundle2.parthandler("test:abort")
1057 > def handleabort(op, part):
1058 > def handleabort(op, part):
1058 > raise util.Abort('Abandon ship!', hint="don't panic")
1059 > raise util.Abort('Abandon ship!', hint="don't panic")
1059 >
1060 >
1060 > def uisetup(ui):
1061 > def uisetup(ui):
1061 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
1062 > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart
1062 > exchange.b2partsgenorder.insert(0, 'failpart')
1063 > exchange.b2partsgenorder.insert(0, 'failpart')
1063 >
1064 >
1064 > EOF
1065 > EOF
1065
1066
1066 $ cd main
1067 $ cd main
1067 $ hg up tip
1068 $ hg up tip
1068 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
1069 3 files updated, 0 files merged, 1 files removed, 0 files unresolved
1069 $ echo 'I' > I
1070 $ echo 'I' > I
1070 $ hg add I
1071 $ hg add I
1071 $ hg ci -m 'I'
1072 $ hg ci -m 'I'
1072 $ hg id
1073 $ hg id
1073 e7ec4e813ba6 tip
1074 e7ec4e813ba6 tip
1074 $ cd ..
1075 $ cd ..
1075
1076
1076 $ cat << EOF >> $HGRCPATH
1077 $ cat << EOF >> $HGRCPATH
1077 > [extensions]
1078 > [extensions]
1078 > failpush=$TESTTMP/failpush.py
1079 > failpush=$TESTTMP/failpush.py
1079 > EOF
1080 > EOF
1080
1081
1081 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1082 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1082 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1083 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1083 $ cat other.pid >> $DAEMON_PIDS
1084 $ cat other.pid >> $DAEMON_PIDS
1084
1085
1085 Doing the actual push: Abort error
1086 Doing the actual push: Abort error
1086
1087
1087 $ cat << EOF >> $HGRCPATH
1088 $ cat << EOF >> $HGRCPATH
1088 > [failpush]
1089 > [failpush]
1089 > reason = abort
1090 > reason = abort
1090 > EOF
1091 > EOF
1091
1092
1092 $ hg -R main push other -r e7ec4e813ba6
1093 $ hg -R main push other -r e7ec4e813ba6
1093 pushing to other
1094 pushing to other
1094 searching for changes
1095 searching for changes
1095 abort: Abandon ship!
1096 abort: Abandon ship!
1096 (don't panic)
1097 (don't panic)
1097 [255]
1098 [255]
1098
1099
1099 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1100 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1100 pushing to ssh://user@dummy/other
1101 pushing to ssh://user@dummy/other
1101 searching for changes
1102 searching for changes
1102 abort: Abandon ship!
1103 abort: Abandon ship!
1103 (don't panic)
1104 (don't panic)
1104 [255]
1105 [255]
1105
1106
1106 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1107 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1107 pushing to http://localhost:$HGPORT2/
1108 pushing to http://localhost:$HGPORT2/
1108 searching for changes
1109 searching for changes
1109 abort: Abandon ship!
1110 abort: Abandon ship!
1110 (don't panic)
1111 (don't panic)
1111 [255]
1112 [255]
1112
1113
1113
1114
1114 Doing the actual push: unknown mandatory parts
1115 Doing the actual push: unknown mandatory parts
1115
1116
1116 $ cat << EOF >> $HGRCPATH
1117 $ cat << EOF >> $HGRCPATH
1117 > [failpush]
1118 > [failpush]
1118 > reason = unknown
1119 > reason = unknown
1119 > EOF
1120 > EOF
1120
1121
1121 $ hg -R main push other -r e7ec4e813ba6
1122 $ hg -R main push other -r e7ec4e813ba6
1122 pushing to other
1123 pushing to other
1123 searching for changes
1124 searching for changes
1124 abort: missing support for test:unknown
1125 abort: missing support for test:unknown
1125 [255]
1126 [255]
1126
1127
1127 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1128 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1128 pushing to ssh://user@dummy/other
1129 pushing to ssh://user@dummy/other
1129 searching for changes
1130 searching for changes
1130 abort: missing support for test:unknown
1131 abort: missing support for test:unknown
1131 [255]
1132 [255]
1132
1133
1133 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1134 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1134 pushing to http://localhost:$HGPORT2/
1135 pushing to http://localhost:$HGPORT2/
1135 searching for changes
1136 searching for changes
1136 abort: missing support for test:unknown
1137 abort: missing support for test:unknown
1137 [255]
1138 [255]
1138
1139
1139 Doing the actual push: race
1140 Doing the actual push: race
1140
1141
1141 $ cat << EOF >> $HGRCPATH
1142 $ cat << EOF >> $HGRCPATH
1142 > [failpush]
1143 > [failpush]
1143 > reason = race
1144 > reason = race
1144 > EOF
1145 > EOF
1145
1146
1146 $ hg -R main push other -r e7ec4e813ba6
1147 $ hg -R main push other -r e7ec4e813ba6
1147 pushing to other
1148 pushing to other
1148 searching for changes
1149 searching for changes
1149 abort: push failed:
1150 abort: push failed:
1150 'repository changed while pushing - please try again'
1151 'repository changed while pushing - please try again'
1151 [255]
1152 [255]
1152
1153
1153 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1154 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1154 pushing to ssh://user@dummy/other
1155 pushing to ssh://user@dummy/other
1155 searching for changes
1156 searching for changes
1156 abort: push failed:
1157 abort: push failed:
1157 'repository changed while pushing - please try again'
1158 'repository changed while pushing - please try again'
1158 [255]
1159 [255]
1159
1160
1160 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1161 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1161 pushing to http://localhost:$HGPORT2/
1162 pushing to http://localhost:$HGPORT2/
1162 searching for changes
1163 searching for changes
1163 abort: push failed:
1164 abort: push failed:
1164 'repository changed while pushing - please try again'
1165 'repository changed while pushing - please try again'
1165 [255]
1166 [255]
1166
1167
1167 Doing the actual push: hook abort
1168 Doing the actual push: hook abort
1168
1169
1169 $ cat << EOF >> $HGRCPATH
1170 $ cat << EOF >> $HGRCPATH
1170 > [failpush]
1171 > [failpush]
1171 > reason =
1172 > reason =
1172 > [hooks]
1173 > [hooks]
1173 > b2x-pretransactionclose.failpush = false
1174 > b2x-pretransactionclose.failpush = false
1174 > EOF
1175 > EOF
1175
1176
1176 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1177 $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
1177 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1178 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
1178 $ cat other.pid >> $DAEMON_PIDS
1179 $ cat other.pid >> $DAEMON_PIDS
1179
1180
1180 $ hg -R main push other -r e7ec4e813ba6
1181 $ hg -R main push other -r e7ec4e813ba6
1181 pushing to other
1182 pushing to other
1182 searching for changes
1183 searching for changes
1183 transaction abort!
1184 transaction abort!
1184 rollback completed
1185 rollback completed
1185 abort: b2x-pretransactionclose.failpush hook exited with status 1
1186 abort: b2x-pretransactionclose.failpush hook exited with status 1
1186 [255]
1187 [255]
1187
1188
1188 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1189 $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
1189 pushing to ssh://user@dummy/other
1190 pushing to ssh://user@dummy/other
1190 searching for changes
1191 searching for changes
1191 abort: b2x-pretransactionclose.failpush hook exited with status 1
1192 abort: b2x-pretransactionclose.failpush hook exited with status 1
1192 remote: transaction abort!
1193 remote: transaction abort!
1193 remote: rollback completed
1194 remote: rollback completed
1194 [255]
1195 [255]
1195
1196
1196 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1197 $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
1197 pushing to http://localhost:$HGPORT2/
1198 pushing to http://localhost:$HGPORT2/
1198 searching for changes
1199 searching for changes
1199 abort: b2x-pretransactionclose.failpush hook exited with status 1
1200 abort: b2x-pretransactionclose.failpush hook exited with status 1
1200 [255]
1201 [255]
1201
1202
1202
1203
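The single line this diff adds to the expected push output above, "remote: 5 new obsolescence markers", shows the markers now travelling inside the bundle2 stream alongside the changegroup instead of through a separate exchange afterwards. The sketch below is a minimal, self-contained illustration of that decision only; it does not use Mercurial's APIs, and every name in it (encode_markers, ToyBundler, push_obsmarkers, pushkey_send) is a hypothetical stand-in: include an obsmarker part in the outgoing bundle when the remote advertises support for it, and fall back to an older pushkey-style transfer otherwise.

def encode_markers(markers, version):
    # Toy encoding: a version header plus one "precursor successor" line
    # per marker.  Real marker wire formats are binary and carry more data.
    header = "version %d\n" % version
    body = "".join("%s %s\n" % (old, new) for old, new in markers)
    return (header + body).encode("ascii")


class ToyBundler(object):
    """Collects (part type, payload) pairs; stands in for a bundle builder."""
    def __init__(self):
        self.parts = []

    def newpart(self, parttype, data=b""):
        self.parts.append((parttype, data))


def push_obsmarkers(markers, remote_caps, bundler, pushkey_send):
    """Attach markers to the outgoing bundle when the remote supports it."""
    if not markers:
        return "nothing-to-send"
    supported = remote_caps.get("obsmarkers", ())
    if supported:
        version = max(supported)  # newest marker format both sides know
        bundler.newpart("obsmarkers", data=encode_markers(markers, version))
        return "bundle2"
    pushkey_send(markers)  # remote cannot take an obsmarker part
    return "pushkey"


if __name__ == "__main__":
    markers = [("aaaa", "9520eea781bc"), ("bbbb", "24b6387c8c8c")]
    bundler = ToyBundler()
    route = push_obsmarkers(markers, {"obsmarkers": (0, 1)}, bundler,
                            pushkey_send=lambda m: None)
    print(route, len(bundler.parts))  # -> bundle2 1

Run as written it prints "bundle2 1"; with an empty capability map it prints "pushkey 0" instead, which is the fallback path the non-bundle2 scenarios in this test exercise.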