bundle2: gracefully handle hook abort...
Pierre-Yves David
r21187:bcfd44ab stable
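The fix wraps the bundle2 branch of unbundle() in a try/except that tags every exception with duringunbundle2 = True before re-raising, so the caller can tell that the failure (typically a hook abort) happened while a bundle2 reply was still owed to the client. Below is a minimal sketch of that pattern, using stand-in names (Abort, process, payload) rather than Mercurial's real internals:

    class Abort(Exception):
        """stand-in for the exception a rejecting hook raises"""

    def process(data):
        # pretend a pretxnchangegroup-style hook rejected the incoming data
        raise Abort('hook refused the changes: %s' % data)

    def unbundle(data):
        try:
            return process(data)
        except Exception as exc:
            # tag the exception so the caller knows it was raised while a
            # bundle2 reply was expected, then let it propagate
            exc.duringunbundle2 = True
            raise

    try:
        unbundle('payload')
    except Abort as exc:
        if getattr(exc, 'duringunbundle2', False):
            print('abort during bundle2 processing: %s' % exc)

In the actual diff below, the check_heads() race detection and the transaction/lock release in the finally block are unchanged; only the try/except tagging around the bundle2 processing is new.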
@@ -1,757 +1,761
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex, nullid
9 from node import hex, nullid
10 import errno, urllib
10 import errno, urllib
11 import util, scmutil, changegroup, base85, error
11 import util, scmutil, changegroup, base85, error
12 import discovery, phases, obsolete, bookmarks, bundle2
12 import discovery, phases, obsolete, bookmarks, bundle2
13
13
14 def readbundle(ui, fh, fname, vfs=None):
14 def readbundle(ui, fh, fname, vfs=None):
15 header = changegroup.readexactly(fh, 4)
15 header = changegroup.readexactly(fh, 4)
16
16
17 alg = None
17 alg = None
18 if not fname:
18 if not fname:
19 fname = "stream"
19 fname = "stream"
20 if not header.startswith('HG') and header.startswith('\0'):
20 if not header.startswith('HG') and header.startswith('\0'):
21 fh = changegroup.headerlessfixup(fh, header)
21 fh = changegroup.headerlessfixup(fh, header)
22 header = "HG10"
22 header = "HG10"
23 alg = 'UN'
23 alg = 'UN'
24 elif vfs:
24 elif vfs:
25 fname = vfs.join(fname)
25 fname = vfs.join(fname)
26
26
27 magic, version = header[0:2], header[2:4]
27 magic, version = header[0:2], header[2:4]
28
28
29 if magic != 'HG':
29 if magic != 'HG':
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
30 raise util.Abort(_('%s: not a Mercurial bundle') % fname)
31 if version == '10':
31 if version == '10':
32 if alg is None:
32 if alg is None:
33 alg = changegroup.readexactly(fh, 2)
33 alg = changegroup.readexactly(fh, 2)
34 return changegroup.unbundle10(fh, alg)
34 return changegroup.unbundle10(fh, alg)
35 elif version == '2X':
35 elif version == '2X':
36 return bundle2.unbundle20(ui, fh, header=magic + version)
36 return bundle2.unbundle20(ui, fh, header=magic + version)
37 else:
37 else:
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
38 raise util.Abort(_('%s: unknown bundle version %s') % (fname, version))
39
39
40
40
41 class pushoperation(object):
41 class pushoperation(object):
42 """A object that represent a single push operation
42 """A object that represent a single push operation
43
43
44 Its purpose is to carry push-related state and very common operations.
44 Its purpose is to carry push-related state and very common operations.
45
45
46 A new one should be created at the beginning of each push and discarded
46 A new one should be created at the beginning of each push and discarded
47 afterward.
47 afterward.
48 """
48 """
49
49
50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
50 def __init__(self, repo, remote, force=False, revs=None, newbranch=False):
51 # repo we push from
51 # repo we push from
52 self.repo = repo
52 self.repo = repo
53 self.ui = repo.ui
53 self.ui = repo.ui
54 # repo we push to
54 # repo we push to
55 self.remote = remote
55 self.remote = remote
56 # force option provided
56 # force option provided
57 self.force = force
57 self.force = force
58 # revs to be pushed (None is "all")
58 # revs to be pushed (None is "all")
59 self.revs = revs
59 self.revs = revs
60 # allow push of new branch
60 # allow push of new branch
61 self.newbranch = newbranch
61 self.newbranch = newbranch
62 # did a local lock get acquired?
62 # did a local lock get acquired?
63 self.locallocked = None
63 self.locallocked = None
64 # Integer version of the push result
64 # Integer version of the push result
65 # - None means nothing to push
65 # - None means nothing to push
66 # - 0 means HTTP error
66 # - 0 means HTTP error
67 # - 1 means we pushed and remote head count is unchanged *or*
67 # - 1 means we pushed and remote head count is unchanged *or*
68 # we have outgoing changesets but refused to push
68 # we have outgoing changesets but refused to push
69 # - other values as described by addchangegroup()
69 # - other values as described by addchangegroup()
70 self.ret = None
70 self.ret = None
71 # discover.outgoing object (contains common and outgoing data)
71 # discover.outgoing object (contains common and outgoing data)
72 self.outgoing = None
72 self.outgoing = None
73 # all remote heads before the push
73 # all remote heads before the push
74 self.remoteheads = None
74 self.remoteheads = None
75 # testable as a boolean indicating if any nodes are missing locally.
75 # testable as a boolean indicating if any nodes are missing locally.
76 self.incoming = None
76 self.incoming = None
77 # set of all heads common after changeset bundle push
77 # set of all heads common after changeset bundle push
78 self.commonheads = None
78 self.commonheads = None
79
79
80 def push(repo, remote, force=False, revs=None, newbranch=False):
80 def push(repo, remote, force=False, revs=None, newbranch=False):
81 '''Push outgoing changesets (limited by revs) from a local
81 '''Push outgoing changesets (limited by revs) from a local
82 repository to remote. Return an integer:
82 repository to remote. Return an integer:
83 - None means nothing to push
83 - None means nothing to push
84 - 0 means HTTP error
84 - 0 means HTTP error
85 - 1 means we pushed and remote head count is unchanged *or*
85 - 1 means we pushed and remote head count is unchanged *or*
86 we have outgoing changesets but refused to push
86 we have outgoing changesets but refused to push
87 - other values as described by addchangegroup()
87 - other values as described by addchangegroup()
88 '''
88 '''
89 pushop = pushoperation(repo, remote, force, revs, newbranch)
89 pushop = pushoperation(repo, remote, force, revs, newbranch)
90 if pushop.remote.local():
90 if pushop.remote.local():
91 missing = (set(pushop.repo.requirements)
91 missing = (set(pushop.repo.requirements)
92 - pushop.remote.local().supported)
92 - pushop.remote.local().supported)
93 if missing:
93 if missing:
94 msg = _("required features are not"
94 msg = _("required features are not"
95 " supported in the destination:"
95 " supported in the destination:"
96 " %s") % (', '.join(sorted(missing)))
96 " %s") % (', '.join(sorted(missing)))
97 raise util.Abort(msg)
97 raise util.Abort(msg)
98
98
99 # there are two ways to push to remote repo:
99 # there are two ways to push to remote repo:
100 #
100 #
101 # addchangegroup assumes local user can lock remote
101 # addchangegroup assumes local user can lock remote
102 # repo (local filesystem, old ssh servers).
102 # repo (local filesystem, old ssh servers).
103 #
103 #
104 # unbundle assumes local user cannot lock remote repo (new ssh
104 # unbundle assumes local user cannot lock remote repo (new ssh
105 # servers, http servers).
105 # servers, http servers).
106
106
107 if not pushop.remote.canpush():
107 if not pushop.remote.canpush():
108 raise util.Abort(_("destination does not support push"))
108 raise util.Abort(_("destination does not support push"))
109 # get local lock as we might write phase data
109 # get local lock as we might write phase data
110 locallock = None
110 locallock = None
111 try:
111 try:
112 locallock = pushop.repo.lock()
112 locallock = pushop.repo.lock()
113 pushop.locallocked = True
113 pushop.locallocked = True
114 except IOError, err:
114 except IOError, err:
115 pushop.locallocked = False
115 pushop.locallocked = False
116 if err.errno != errno.EACCES:
116 if err.errno != errno.EACCES:
117 raise
117 raise
118 # source repo cannot be locked.
118 # source repo cannot be locked.
119 # We do not abort the push, but just disable the local phase
119 # We do not abort the push, but just disable the local phase
120 # synchronisation.
120 # synchronisation.
121 msg = 'cannot lock source repository: %s\n' % err
121 msg = 'cannot lock source repository: %s\n' % err
122 pushop.ui.debug(msg)
122 pushop.ui.debug(msg)
123 try:
123 try:
124 pushop.repo.checkpush(pushop)
124 pushop.repo.checkpush(pushop)
125 lock = None
125 lock = None
126 unbundle = pushop.remote.capable('unbundle')
126 unbundle = pushop.remote.capable('unbundle')
127 if not unbundle:
127 if not unbundle:
128 lock = pushop.remote.lock()
128 lock = pushop.remote.lock()
129 try:
129 try:
130 _pushdiscovery(pushop)
130 _pushdiscovery(pushop)
131 if _pushcheckoutgoing(pushop):
131 if _pushcheckoutgoing(pushop):
132 pushop.repo.prepushoutgoinghooks(pushop.repo,
132 pushop.repo.prepushoutgoinghooks(pushop.repo,
133 pushop.remote,
133 pushop.remote,
134 pushop.outgoing)
134 pushop.outgoing)
135 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
135 if (pushop.repo.ui.configbool('experimental', 'bundle2-exp',
136 False)
136 False)
137 and pushop.remote.capable('bundle2-exp')):
137 and pushop.remote.capable('bundle2-exp')):
138 _pushbundle2(pushop)
138 _pushbundle2(pushop)
139 else:
139 else:
140 _pushchangeset(pushop)
140 _pushchangeset(pushop)
141 _pushcomputecommonheads(pushop)
141 _pushcomputecommonheads(pushop)
142 _pushsyncphase(pushop)
142 _pushsyncphase(pushop)
143 _pushobsolete(pushop)
143 _pushobsolete(pushop)
144 finally:
144 finally:
145 if lock is not None:
145 if lock is not None:
146 lock.release()
146 lock.release()
147 finally:
147 finally:
148 if locallock is not None:
148 if locallock is not None:
149 locallock.release()
149 locallock.release()
150
150
151 _pushbookmark(pushop)
151 _pushbookmark(pushop)
152 return pushop.ret
152 return pushop.ret
153
153
154 def _pushdiscovery(pushop):
154 def _pushdiscovery(pushop):
155 # discovery
155 # discovery
156 unfi = pushop.repo.unfiltered()
156 unfi = pushop.repo.unfiltered()
157 fci = discovery.findcommonincoming
157 fci = discovery.findcommonincoming
158 commoninc = fci(unfi, pushop.remote, force=pushop.force)
158 commoninc = fci(unfi, pushop.remote, force=pushop.force)
159 common, inc, remoteheads = commoninc
159 common, inc, remoteheads = commoninc
160 fco = discovery.findcommonoutgoing
160 fco = discovery.findcommonoutgoing
161 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
161 outgoing = fco(unfi, pushop.remote, onlyheads=pushop.revs,
162 commoninc=commoninc, force=pushop.force)
162 commoninc=commoninc, force=pushop.force)
163 pushop.outgoing = outgoing
163 pushop.outgoing = outgoing
164 pushop.remoteheads = remoteheads
164 pushop.remoteheads = remoteheads
165 pushop.incoming = inc
165 pushop.incoming = inc
166
166
167 def _pushcheckoutgoing(pushop):
167 def _pushcheckoutgoing(pushop):
168 outgoing = pushop.outgoing
168 outgoing = pushop.outgoing
169 unfi = pushop.repo.unfiltered()
169 unfi = pushop.repo.unfiltered()
170 if not outgoing.missing:
170 if not outgoing.missing:
171 # nothing to push
171 # nothing to push
172 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
172 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
173 return False
173 return False
174 # something to push
174 # something to push
175 if not pushop.force:
175 if not pushop.force:
176 # if repo.obsstore == False --> no obsolete
176 # if repo.obsstore == False --> no obsolete
177 # then, save the iteration
177 # then, save the iteration
178 if unfi.obsstore:
178 if unfi.obsstore:
179 # these messages are here for 80 char limit reasons
179 # these messages are here for 80 char limit reasons
180 mso = _("push includes obsolete changeset: %s!")
180 mso = _("push includes obsolete changeset: %s!")
181 mst = "push includes %s changeset: %s!"
181 mst = "push includes %s changeset: %s!"
182 # plain versions for i18n tool to detect them
182 # plain versions for i18n tool to detect them
183 _("push includes unstable changeset: %s!")
183 _("push includes unstable changeset: %s!")
184 _("push includes bumped changeset: %s!")
184 _("push includes bumped changeset: %s!")
185 _("push includes divergent changeset: %s!")
185 _("push includes divergent changeset: %s!")
186 # If we are pushing and there is at least one
186 # If we are pushing and there is at least one
187 # obsolete or unstable changeset in missing, then at
187 # obsolete or unstable changeset in missing, then at
188 # least one of the missing heads will be obsolete or
188 # least one of the missing heads will be obsolete or
189 # unstable. So checking heads only is ok
189 # unstable. So checking heads only is ok
190 for node in outgoing.missingheads:
190 for node in outgoing.missingheads:
191 ctx = unfi[node]
191 ctx = unfi[node]
192 if ctx.obsolete():
192 if ctx.obsolete():
193 raise util.Abort(mso % ctx)
193 raise util.Abort(mso % ctx)
194 elif ctx.troubled():
194 elif ctx.troubled():
195 raise util.Abort(_(mst)
195 raise util.Abort(_(mst)
196 % (ctx.troubles()[0],
196 % (ctx.troubles()[0],
197 ctx))
197 ctx))
198 newbm = pushop.ui.configlist('bookmarks', 'pushing')
198 newbm = pushop.ui.configlist('bookmarks', 'pushing')
199 discovery.checkheads(unfi, pushop.remote, outgoing,
199 discovery.checkheads(unfi, pushop.remote, outgoing,
200 pushop.remoteheads,
200 pushop.remoteheads,
201 pushop.newbranch,
201 pushop.newbranch,
202 bool(pushop.incoming),
202 bool(pushop.incoming),
203 newbm)
203 newbm)
204 return True
204 return True
205
205
206 def _pushbundle2(pushop):
206 def _pushbundle2(pushop):
207 """push data to the remote using bundle2
207 """push data to the remote using bundle2
208
208
209 The only currently supported type of data is changegroup but this will
209 The only currently supported type of data is changegroup but this will
210 evolve in the future."""
210 evolve in the future."""
211 # Send known head to the server for race detection.
211 # Send known head to the server for race detection.
212 capsblob = urllib.unquote(pushop.remote.capable('bundle2-exp'))
212 capsblob = urllib.unquote(pushop.remote.capable('bundle2-exp'))
213 caps = bundle2.decodecaps(capsblob)
213 caps = bundle2.decodecaps(capsblob)
214 bundler = bundle2.bundle20(pushop.ui, caps)
214 bundler = bundle2.bundle20(pushop.ui, caps)
215 # create reply capability
215 # create reply capability
216 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
216 capsblob = bundle2.encodecaps(pushop.repo.bundle2caps)
217 bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsblob))
217 bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsblob))
218 if not pushop.force:
218 if not pushop.force:
219 part = bundle2.bundlepart('B2X:CHECK:HEADS',
219 part = bundle2.bundlepart('B2X:CHECK:HEADS',
220 data=iter(pushop.remoteheads))
220 data=iter(pushop.remoteheads))
221 bundler.addpart(part)
221 bundler.addpart(part)
222 extrainfo = _pushbundle2extraparts(pushop, bundler)
222 extrainfo = _pushbundle2extraparts(pushop, bundler)
223 # add the changegroup bundle
223 # add the changegroup bundle
224 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
224 cg = changegroup.getlocalbundle(pushop.repo, 'push', pushop.outgoing)
225 cgpart = bundle2.bundlepart('B2X:CHANGEGROUP', data=cg.getchunks())
225 cgpart = bundle2.bundlepart('B2X:CHANGEGROUP', data=cg.getchunks())
226 bundler.addpart(cgpart)
226 bundler.addpart(cgpart)
227 stream = util.chunkbuffer(bundler.getchunks())
227 stream = util.chunkbuffer(bundler.getchunks())
228 try:
228 try:
229 reply = pushop.remote.unbundle(stream, ['force'], 'push')
229 reply = pushop.remote.unbundle(stream, ['force'], 'push')
230 except bundle2.UnknownPartError, exc:
230 except bundle2.UnknownPartError, exc:
231 raise util.Abort('missing support for %s' % exc)
231 raise util.Abort('missing support for %s' % exc)
232 try:
232 try:
233 op = bundle2.processbundle(pushop.repo, reply)
233 op = bundle2.processbundle(pushop.repo, reply)
234 except bundle2.UnknownPartError, exc:
234 except bundle2.UnknownPartError, exc:
235 raise util.Abort('missing support for %s' % exc)
235 raise util.Abort('missing support for %s' % exc)
236 cgreplies = op.records.getreplies(cgpart.id)
236 cgreplies = op.records.getreplies(cgpart.id)
237 assert len(cgreplies['changegroup']) == 1
237 assert len(cgreplies['changegroup']) == 1
238 pushop.ret = cgreplies['changegroup'][0]['return']
238 pushop.ret = cgreplies['changegroup'][0]['return']
239 _pushbundle2extrareply(pushop, op, extrainfo)
239 _pushbundle2extrareply(pushop, op, extrainfo)
240
240
241 def _pushbundle2extraparts(pushop, bundler):
241 def _pushbundle2extraparts(pushop, bundler):
242 """hook function to let extensions add parts
242 """hook function to let extensions add parts
243
243
244 Return a dict to let extensions pass data to the reply processing.
244 Return a dict to let extensions pass data to the reply processing.
245 """
245 """
246 return {}
246 return {}
247
247
248 def _pushbundle2extrareply(pushop, op, extrainfo):
248 def _pushbundle2extrareply(pushop, op, extrainfo):
249 """hook function to let extensions react to part replies
249 """hook function to let extensions react to part replies
250
250
251 The dict from _pushbundle2extrareply is fed to this function.
251 The dict from _pushbundle2extrareply is fed to this function.
252 """
252 """
253 pass
253 pass
254
254
255 def _pushchangeset(pushop):
255 def _pushchangeset(pushop):
256 """Make the actual push of changeset bundle to remote repo"""
256 """Make the actual push of changeset bundle to remote repo"""
257 outgoing = pushop.outgoing
257 outgoing = pushop.outgoing
258 unbundle = pushop.remote.capable('unbundle')
258 unbundle = pushop.remote.capable('unbundle')
259 # TODO: get bundlecaps from remote
259 # TODO: get bundlecaps from remote
260 bundlecaps = None
260 bundlecaps = None
261 # create a changegroup from local
261 # create a changegroup from local
262 if pushop.revs is None and not (outgoing.excluded
262 if pushop.revs is None and not (outgoing.excluded
263 or pushop.repo.changelog.filteredrevs):
263 or pushop.repo.changelog.filteredrevs):
264 # push everything,
264 # push everything,
265 # use the fast path, no race possible on push
265 # use the fast path, no race possible on push
266 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
266 bundler = changegroup.bundle10(pushop.repo, bundlecaps)
267 cg = changegroup.getsubset(pushop.repo,
267 cg = changegroup.getsubset(pushop.repo,
268 outgoing,
268 outgoing,
269 bundler,
269 bundler,
270 'push',
270 'push',
271 fastpath=True)
271 fastpath=True)
272 else:
272 else:
273 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
273 cg = changegroup.getlocalbundle(pushop.repo, 'push', outgoing,
274 bundlecaps)
274 bundlecaps)
275
275
276 # apply changegroup to remote
276 # apply changegroup to remote
277 if unbundle:
277 if unbundle:
278 # local repo finds heads on server, finds out what
278 # local repo finds heads on server, finds out what
279 # revs it must push. once revs transferred, if server
279 # revs it must push. once revs transferred, if server
280 # finds it has different heads (someone else won
280 # finds it has different heads (someone else won
281 # commit/push race), server aborts.
281 # commit/push race), server aborts.
282 if pushop.force:
282 if pushop.force:
283 remoteheads = ['force']
283 remoteheads = ['force']
284 else:
284 else:
285 remoteheads = pushop.remoteheads
285 remoteheads = pushop.remoteheads
286 # ssh: return remote's addchangegroup()
286 # ssh: return remote's addchangegroup()
287 # http: return remote's addchangegroup() or 0 for error
287 # http: return remote's addchangegroup() or 0 for error
288 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
288 pushop.ret = pushop.remote.unbundle(cg, remoteheads,
289 'push')
289 'push')
290 else:
290 else:
291 # we return an integer indicating remote head count
291 # we return an integer indicating remote head count
292 # change
292 # change
293 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
293 pushop.ret = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url())
294
294
295 def _pushcomputecommonheads(pushop):
295 def _pushcomputecommonheads(pushop):
296 unfi = pushop.repo.unfiltered()
296 unfi = pushop.repo.unfiltered()
297 if pushop.ret:
297 if pushop.ret:
298 # push succeed, synchronize target of the push
298 # push succeed, synchronize target of the push
299 cheads = pushop.outgoing.missingheads
299 cheads = pushop.outgoing.missingheads
300 elif pushop.revs is None:
300 elif pushop.revs is None:
301 # All out push fails. synchronize all common
301 # All out push fails. synchronize all common
302 cheads = pushop.outgoing.commonheads
302 cheads = pushop.outgoing.commonheads
303 else:
303 else:
304 # I want cheads = heads(::missingheads and ::commonheads)
304 # I want cheads = heads(::missingheads and ::commonheads)
305 # (missingheads is revs with secret changeset filtered out)
305 # (missingheads is revs with secret changeset filtered out)
306 #
306 #
307 # This can be expressed as:
307 # This can be expressed as:
308 # cheads = ( (missingheads and ::commonheads)
308 # cheads = ( (missingheads and ::commonheads)
309 # + (commonheads and ::missingheads))"
309 # + (commonheads and ::missingheads))"
310 # )
310 # )
311 #
311 #
312 # while trying to push we already computed the following:
312 # while trying to push we already computed the following:
313 # common = (::commonheads)
313 # common = (::commonheads)
314 # missing = ((commonheads::missingheads) - commonheads)
314 # missing = ((commonheads::missingheads) - commonheads)
315 #
315 #
316 # We can pick:
316 # We can pick:
317 # * missingheads part of common (::commonheads)
317 # * missingheads part of common (::commonheads)
318 common = set(pushop.outgoing.common)
318 common = set(pushop.outgoing.common)
319 nm = pushop.repo.changelog.nodemap
319 nm = pushop.repo.changelog.nodemap
320 cheads = [node for node in pushop.revs if nm[node] in common]
320 cheads = [node for node in pushop.revs if nm[node] in common]
321 # and
321 # and
322 # * commonheads parents on missing
322 # * commonheads parents on missing
323 revset = unfi.set('%ln and parents(roots(%ln))',
323 revset = unfi.set('%ln and parents(roots(%ln))',
324 pushop.outgoing.commonheads,
324 pushop.outgoing.commonheads,
325 pushop.outgoing.missing)
325 pushop.outgoing.missing)
326 cheads.extend(c.node() for c in revset)
326 cheads.extend(c.node() for c in revset)
327 pushop.commonheads = cheads
327 pushop.commonheads = cheads
328
328
329 def _pushsyncphase(pushop):
329 def _pushsyncphase(pushop):
330 """synchronise phase information locally and remotely"""
330 """synchronise phase information locally and remotely"""
331 unfi = pushop.repo.unfiltered()
331 unfi = pushop.repo.unfiltered()
332 cheads = pushop.commonheads
332 cheads = pushop.commonheads
333 if pushop.ret:
333 if pushop.ret:
334 # push succeed, synchronize target of the push
334 # push succeed, synchronize target of the push
335 cheads = pushop.outgoing.missingheads
335 cheads = pushop.outgoing.missingheads
336 elif pushop.revs is None:
336 elif pushop.revs is None:
337 # All out push fails. synchronize all common
337 # All out push fails. synchronize all common
338 cheads = pushop.outgoing.commonheads
338 cheads = pushop.outgoing.commonheads
339 else:
339 else:
340 # I want cheads = heads(::missingheads and ::commonheads)
340 # I want cheads = heads(::missingheads and ::commonheads)
341 # (missingheads is revs with secret changeset filtered out)
341 # (missingheads is revs with secret changeset filtered out)
342 #
342 #
343 # This can be expressed as:
343 # This can be expressed as:
344 # cheads = ( (missingheads and ::commonheads)
344 # cheads = ( (missingheads and ::commonheads)
345 # + (commonheads and ::missingheads))"
345 # + (commonheads and ::missingheads))"
346 # )
346 # )
347 #
347 #
348 # while trying to push we already computed the following:
348 # while trying to push we already computed the following:
349 # common = (::commonheads)
349 # common = (::commonheads)
350 # missing = ((commonheads::missingheads) - commonheads)
350 # missing = ((commonheads::missingheads) - commonheads)
351 #
351 #
352 # We can pick:
352 # We can pick:
353 # * missingheads part of common (::commonheads)
353 # * missingheads part of common (::commonheads)
354 common = set(pushop.outgoing.common)
354 common = set(pushop.outgoing.common)
355 nm = pushop.repo.changelog.nodemap
355 nm = pushop.repo.changelog.nodemap
356 cheads = [node for node in pushop.revs if nm[node] in common]
356 cheads = [node for node in pushop.revs if nm[node] in common]
357 # and
357 # and
358 # * commonheads parents on missing
358 # * commonheads parents on missing
359 revset = unfi.set('%ln and parents(roots(%ln))',
359 revset = unfi.set('%ln and parents(roots(%ln))',
360 pushop.outgoing.commonheads,
360 pushop.outgoing.commonheads,
361 pushop.outgoing.missing)
361 pushop.outgoing.missing)
362 cheads.extend(c.node() for c in revset)
362 cheads.extend(c.node() for c in revset)
363 pushop.commonheads = cheads
363 pushop.commonheads = cheads
364 # even when we don't push, exchanging phase data is useful
364 # even when we don't push, exchanging phase data is useful
365 remotephases = pushop.remote.listkeys('phases')
365 remotephases = pushop.remote.listkeys('phases')
366 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
366 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
367 and remotephases # server supports phases
367 and remotephases # server supports phases
368 and pushop.ret is None # nothing was pushed
368 and pushop.ret is None # nothing was pushed
369 and remotephases.get('publishing', False)):
369 and remotephases.get('publishing', False)):
370 # When:
370 # When:
371 # - this is a subrepo push
371 # - this is a subrepo push
372 # - and remote support phase
372 # - and remote support phase
373 # - and no changeset was pushed
373 # - and no changeset was pushed
374 # - and remote is publishing
374 # - and remote is publishing
375 # We may be in issue 3871 case!
375 # We may be in issue 3871 case!
376 # We drop the possible phase synchronisation done by
376 # We drop the possible phase synchronisation done by
377 # courtesy to publish changesets possibly locally draft
377 # courtesy to publish changesets possibly locally draft
378 # on the remote.
378 # on the remote.
379 remotephases = {'publishing': 'True'}
379 remotephases = {'publishing': 'True'}
380 if not remotephases: # old server or public only reply from non-publishing
380 if not remotephases: # old server or public only reply from non-publishing
381 _localphasemove(pushop, cheads)
381 _localphasemove(pushop, cheads)
382 # don't push any phase data as there is nothing to push
382 # don't push any phase data as there is nothing to push
383 else:
383 else:
384 ana = phases.analyzeremotephases(pushop.repo, cheads,
384 ana = phases.analyzeremotephases(pushop.repo, cheads,
385 remotephases)
385 remotephases)
386 pheads, droots = ana
386 pheads, droots = ana
387 ### Apply remote phase on local
387 ### Apply remote phase on local
388 if remotephases.get('publishing', False):
388 if remotephases.get('publishing', False):
389 _localphasemove(pushop, cheads)
389 _localphasemove(pushop, cheads)
390 else: # publish = False
390 else: # publish = False
391 _localphasemove(pushop, pheads)
391 _localphasemove(pushop, pheads)
392 _localphasemove(pushop, cheads, phases.draft)
392 _localphasemove(pushop, cheads, phases.draft)
393 ### Apply local phase on remote
393 ### Apply local phase on remote
394
394
395 # Get the list of all revs draft on remote by public here.
395 # Get the list of all revs draft on remote by public here.
396 # XXX Beware that revset break if droots is not strictly
396 # XXX Beware that revset break if droots is not strictly
397 # XXX root we may want to ensure it is but it is costly
397 # XXX root we may want to ensure it is but it is costly
398 outdated = unfi.set('heads((%ln::%ln) and public())',
398 outdated = unfi.set('heads((%ln::%ln) and public())',
399 droots, cheads)
399 droots, cheads)
400 for newremotehead in outdated:
400 for newremotehead in outdated:
401 r = pushop.remote.pushkey('phases',
401 r = pushop.remote.pushkey('phases',
402 newremotehead.hex(),
402 newremotehead.hex(),
403 str(phases.draft),
403 str(phases.draft),
404 str(phases.public))
404 str(phases.public))
405 if not r:
405 if not r:
406 pushop.ui.warn(_('updating %s to public failed!\n')
406 pushop.ui.warn(_('updating %s to public failed!\n')
407 % newremotehead)
407 % newremotehead)
408
408
409 def _localphasemove(pushop, nodes, phase=phases.public):
409 def _localphasemove(pushop, nodes, phase=phases.public):
410 """move <nodes> to <phase> in the local source repo"""
410 """move <nodes> to <phase> in the local source repo"""
411 if pushop.locallocked:
411 if pushop.locallocked:
412 phases.advanceboundary(pushop.repo, phase, nodes)
412 phases.advanceboundary(pushop.repo, phase, nodes)
413 else:
413 else:
414 # repo is not locked, do not change any phases!
414 # repo is not locked, do not change any phases!
415 # Informs the user that phases should have been moved when
415 # Informs the user that phases should have been moved when
416 # applicable.
416 # applicable.
417 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
417 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
418 phasestr = phases.phasenames[phase]
418 phasestr = phases.phasenames[phase]
419 if actualmoves:
419 if actualmoves:
420 pushop.ui.status(_('cannot lock source repo, skipping '
420 pushop.ui.status(_('cannot lock source repo, skipping '
421 'local %s phase update\n') % phasestr)
421 'local %s phase update\n') % phasestr)
422
422
423 def _pushobsolete(pushop):
423 def _pushobsolete(pushop):
424 """utility function to push obsolete markers to a remote"""
424 """utility function to push obsolete markers to a remote"""
425 pushop.ui.debug('try to push obsolete markers to remote\n')
425 pushop.ui.debug('try to push obsolete markers to remote\n')
426 repo = pushop.repo
426 repo = pushop.repo
427 remote = pushop.remote
427 remote = pushop.remote
428 if (obsolete._enabled and repo.obsstore and
428 if (obsolete._enabled and repo.obsstore and
429 'obsolete' in remote.listkeys('namespaces')):
429 'obsolete' in remote.listkeys('namespaces')):
430 rslts = []
430 rslts = []
431 remotedata = repo.listkeys('obsolete')
431 remotedata = repo.listkeys('obsolete')
432 for key in sorted(remotedata, reverse=True):
432 for key in sorted(remotedata, reverse=True):
433 # reverse sort to ensure we end with dump0
433 # reverse sort to ensure we end with dump0
434 data = remotedata[key]
434 data = remotedata[key]
435 rslts.append(remote.pushkey('obsolete', key, '', data))
435 rslts.append(remote.pushkey('obsolete', key, '', data))
436 if [r for r in rslts if not r]:
436 if [r for r in rslts if not r]:
437 msg = _('failed to push some obsolete markers!\n')
437 msg = _('failed to push some obsolete markers!\n')
438 repo.ui.warn(msg)
438 repo.ui.warn(msg)
439
439
440 def _pushbookmark(pushop):
440 def _pushbookmark(pushop):
441 """Update bookmark position on remote"""
441 """Update bookmark position on remote"""
442 ui = pushop.ui
442 ui = pushop.ui
443 repo = pushop.repo.unfiltered()
443 repo = pushop.repo.unfiltered()
444 remote = pushop.remote
444 remote = pushop.remote
445 ui.debug("checking for updated bookmarks\n")
445 ui.debug("checking for updated bookmarks\n")
446 revnums = map(repo.changelog.rev, pushop.revs or [])
446 revnums = map(repo.changelog.rev, pushop.revs or [])
447 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
447 ancestors = [a for a in repo.changelog.ancestors(revnums, inclusive=True)]
448 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
448 (addsrc, adddst, advsrc, advdst, diverge, differ, invalid
449 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
449 ) = bookmarks.compare(repo, repo._bookmarks, remote.listkeys('bookmarks'),
450 srchex=hex)
450 srchex=hex)
451
451
452 for b, scid, dcid in advsrc:
452 for b, scid, dcid in advsrc:
453 if ancestors and repo[scid].rev() not in ancestors:
453 if ancestors and repo[scid].rev() not in ancestors:
454 continue
454 continue
455 if remote.pushkey('bookmarks', b, dcid, scid):
455 if remote.pushkey('bookmarks', b, dcid, scid):
456 ui.status(_("updating bookmark %s\n") % b)
456 ui.status(_("updating bookmark %s\n") % b)
457 else:
457 else:
458 ui.warn(_('updating bookmark %s failed!\n') % b)
458 ui.warn(_('updating bookmark %s failed!\n') % b)
459
459
460 class pulloperation(object):
460 class pulloperation(object):
461 """A object that represent a single pull operation
461 """A object that represent a single pull operation
462
462
463 Its purpose is to carry pull-related state and very common operations.
463 Its purpose is to carry pull-related state and very common operations.
464
464
465 A new one should be created at the beginning of each pull and discarded
465 A new one should be created at the beginning of each pull and discarded
466 afterward.
466 afterward.
467 """
467 """
468
468
469 def __init__(self, repo, remote, heads=None, force=False):
469 def __init__(self, repo, remote, heads=None, force=False):
470 # repo we pull into
470 # repo we pull into
471 self.repo = repo
471 self.repo = repo
472 # repo we pull from
472 # repo we pull from
473 self.remote = remote
473 self.remote = remote
474 # revision we try to pull (None is "all")
474 # revision we try to pull (None is "all")
475 self.heads = heads
475 self.heads = heads
476 # do we force pull?
476 # do we force pull?
477 self.force = force
477 self.force = force
478 # the name the pull transaction
478 # the name the pull transaction
479 self._trname = 'pull\n' + util.hidepassword(remote.url())
479 self._trname = 'pull\n' + util.hidepassword(remote.url())
480 # hold the transaction once created
480 # hold the transaction once created
481 self._tr = None
481 self._tr = None
482 # set of common changeset between local and remote before pull
482 # set of common changeset between local and remote before pull
483 self.common = None
483 self.common = None
484 # set of pulled head
484 # set of pulled head
485 self.rheads = None
485 self.rheads = None
486 # list of missing changeset to fetch remotely
486 # list of missing changeset to fetch remotely
487 self.fetch = None
487 self.fetch = None
488 # result of changegroup pulling (used as return code by pull)
488 # result of changegroup pulling (used as return code by pull)
489 self.cgresult = None
489 self.cgresult = None
490 # list of step remaining todo (related to future bundle2 usage)
490 # list of step remaining todo (related to future bundle2 usage)
491 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
491 self.todosteps = set(['changegroup', 'phases', 'obsmarkers'])
492
492
493 @util.propertycache
493 @util.propertycache
494 def pulledsubset(self):
494 def pulledsubset(self):
495 """heads of the set of changeset target by the pull"""
495 """heads of the set of changeset target by the pull"""
496 # compute target subset
496 # compute target subset
497 if self.heads is None:
497 if self.heads is None:
498 # We pulled everything possible
498 # We pulled everything possible
499 # sync on everything common
499 # sync on everything common
500 c = set(self.common)
500 c = set(self.common)
501 ret = list(self.common)
501 ret = list(self.common)
502 for n in self.rheads:
502 for n in self.rheads:
503 if n not in c:
503 if n not in c:
504 ret.append(n)
504 ret.append(n)
505 return ret
505 return ret
506 else:
506 else:
507 # We pulled a specific subset
507 # We pulled a specific subset
508 # sync on this subset
508 # sync on this subset
509 return self.heads
509 return self.heads
510
510
511 def gettransaction(self):
511 def gettransaction(self):
512 """get appropriate pull transaction, creating it if needed"""
512 """get appropriate pull transaction, creating it if needed"""
513 if self._tr is None:
513 if self._tr is None:
514 self._tr = self.repo.transaction(self._trname)
514 self._tr = self.repo.transaction(self._trname)
515 return self._tr
515 return self._tr
516
516
517 def closetransaction(self):
517 def closetransaction(self):
518 """close transaction if created"""
518 """close transaction if created"""
519 if self._tr is not None:
519 if self._tr is not None:
520 self._tr.close()
520 self._tr.close()
521
521
522 def releasetransaction(self):
522 def releasetransaction(self):
523 """release transaction if created"""
523 """release transaction if created"""
524 if self._tr is not None:
524 if self._tr is not None:
525 self._tr.release()
525 self._tr.release()
526
526
527 def pull(repo, remote, heads=None, force=False):
527 def pull(repo, remote, heads=None, force=False):
528 pullop = pulloperation(repo, remote, heads, force)
528 pullop = pulloperation(repo, remote, heads, force)
529 if pullop.remote.local():
529 if pullop.remote.local():
530 missing = set(pullop.remote.requirements) - pullop.repo.supported
530 missing = set(pullop.remote.requirements) - pullop.repo.supported
531 if missing:
531 if missing:
532 msg = _("required features are not"
532 msg = _("required features are not"
533 " supported in the destination:"
533 " supported in the destination:"
534 " %s") % (', '.join(sorted(missing)))
534 " %s") % (', '.join(sorted(missing)))
535 raise util.Abort(msg)
535 raise util.Abort(msg)
536
536
537 lock = pullop.repo.lock()
537 lock = pullop.repo.lock()
538 try:
538 try:
539 _pulldiscovery(pullop)
539 _pulldiscovery(pullop)
540 if (pullop.repo.ui.configbool('server', 'bundle2', False)
540 if (pullop.repo.ui.configbool('server', 'bundle2', False)
541 and pullop.remote.capable('bundle2-exp')):
541 and pullop.remote.capable('bundle2-exp')):
542 _pullbundle2(pullop)
542 _pullbundle2(pullop)
543 if 'changegroup' in pullop.todosteps:
543 if 'changegroup' in pullop.todosteps:
544 _pullchangeset(pullop)
544 _pullchangeset(pullop)
545 if 'phases' in pullop.todosteps:
545 if 'phases' in pullop.todosteps:
546 _pullphase(pullop)
546 _pullphase(pullop)
547 if 'obsmarkers' in pullop.todosteps:
547 if 'obsmarkers' in pullop.todosteps:
548 _pullobsolete(pullop)
548 _pullobsolete(pullop)
549 pullop.closetransaction()
549 pullop.closetransaction()
550 finally:
550 finally:
551 pullop.releasetransaction()
551 pullop.releasetransaction()
552 lock.release()
552 lock.release()
553
553
554 return pullop.cgresult
554 return pullop.cgresult
555
555
556 def _pulldiscovery(pullop):
556 def _pulldiscovery(pullop):
557 """discovery phase for the pull
557 """discovery phase for the pull
558
558
559 Currently handles changeset discovery only; will handle all discovery
559 Currently handles changeset discovery only; will handle all discovery
560 at some point."""
560 at some point."""
561 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
561 tmp = discovery.findcommonincoming(pullop.repo.unfiltered(),
562 pullop.remote,
562 pullop.remote,
563 heads=pullop.heads,
563 heads=pullop.heads,
564 force=pullop.force)
564 force=pullop.force)
565 pullop.common, pullop.fetch, pullop.rheads = tmp
565 pullop.common, pullop.fetch, pullop.rheads = tmp
566
566
567 def _pullbundle2(pullop):
567 def _pullbundle2(pullop):
568 """pull data using bundle2
568 """pull data using bundle2
569
569
570 For now, the only supported data are changegroup."""
570 For now, the only supported data are changegroup."""
571 kwargs = {'bundlecaps': set(['HG2X'])}
571 kwargs = {'bundlecaps': set(['HG2X'])}
572 capsblob = bundle2.encodecaps(pullop.repo.bundle2caps)
572 capsblob = bundle2.encodecaps(pullop.repo.bundle2caps)
573 kwargs['bundlecaps'].add('bundle2=' + urllib.quote(capsblob))
573 kwargs['bundlecaps'].add('bundle2=' + urllib.quote(capsblob))
574 # pulling changegroup
574 # pulling changegroup
575 pullop.todosteps.remove('changegroup')
575 pullop.todosteps.remove('changegroup')
576 if not pullop.fetch:
576 if not pullop.fetch:
577 pullop.repo.ui.status(_("no changes found\n"))
577 pullop.repo.ui.status(_("no changes found\n"))
578 pullop.cgresult = 0
578 pullop.cgresult = 0
579 else:
579 else:
580 kwargs['common'] = pullop.common
580 kwargs['common'] = pullop.common
581 kwargs['heads'] = pullop.heads or pullop.rheads
581 kwargs['heads'] = pullop.heads or pullop.rheads
582 if pullop.heads is None and list(pullop.common) == [nullid]:
582 if pullop.heads is None and list(pullop.common) == [nullid]:
583 pullop.repo.ui.status(_("requesting all changes\n"))
583 pullop.repo.ui.status(_("requesting all changes\n"))
584 _pullbundle2extraprepare(pullop, kwargs)
584 _pullbundle2extraprepare(pullop, kwargs)
585 if kwargs.keys() == ['format']:
585 if kwargs.keys() == ['format']:
586 return # nothing to pull
586 return # nothing to pull
587 bundle = pullop.remote.getbundle('pull', **kwargs)
587 bundle = pullop.remote.getbundle('pull', **kwargs)
588 try:
588 try:
589 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
589 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
590 except UnknownPartError, exc:
590 except UnknownPartError, exc:
591 raise util.Abort('missing support for %s' % exc)
591 raise util.Abort('missing support for %s' % exc)
592 assert len(op.records['changegroup']) == 1
592 assert len(op.records['changegroup']) == 1
593 pullop.cgresult = op.records['changegroup'][0]['return']
593 pullop.cgresult = op.records['changegroup'][0]['return']
594
594
595 def _pullbundle2extraprepare(pullop, kwargs):
595 def _pullbundle2extraprepare(pullop, kwargs):
596 """hook function so that extensions can extend the getbundle call"""
596 """hook function so that extensions can extend the getbundle call"""
597 pass
597 pass
598
598
599 def _pullchangeset(pullop):
599 def _pullchangeset(pullop):
600 """pull changeset from unbundle into the local repo"""
600 """pull changeset from unbundle into the local repo"""
601 # We delay the open of the transaction as late as possible so we
601 # We delay the open of the transaction as late as possible so we
602 # don't open transaction for nothing or you break future useful
602 # don't open transaction for nothing or you break future useful
603 # rollback call
603 # rollback call
604 pullop.todosteps.remove('changegroup')
604 pullop.todosteps.remove('changegroup')
605 if not pullop.fetch:
605 if not pullop.fetch:
606 pullop.repo.ui.status(_("no changes found\n"))
606 pullop.repo.ui.status(_("no changes found\n"))
607 pullop.cgresult = 0
607 pullop.cgresult = 0
608 return
608 return
609 pullop.gettransaction()
609 pullop.gettransaction()
610 if pullop.heads is None and list(pullop.common) == [nullid]:
610 if pullop.heads is None and list(pullop.common) == [nullid]:
611 pullop.repo.ui.status(_("requesting all changes\n"))
611 pullop.repo.ui.status(_("requesting all changes\n"))
612 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
612 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
613 # issue1320, avoid a race if remote changed after discovery
613 # issue1320, avoid a race if remote changed after discovery
614 pullop.heads = pullop.rheads
614 pullop.heads = pullop.rheads
615
615
616 if pullop.remote.capable('getbundle'):
616 if pullop.remote.capable('getbundle'):
617 # TODO: get bundlecaps from remote
617 # TODO: get bundlecaps from remote
618 cg = pullop.remote.getbundle('pull', common=pullop.common,
618 cg = pullop.remote.getbundle('pull', common=pullop.common,
619 heads=pullop.heads or pullop.rheads)
619 heads=pullop.heads or pullop.rheads)
620 elif pullop.heads is None:
620 elif pullop.heads is None:
621 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
621 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
622 elif not pullop.remote.capable('changegroupsubset'):
622 elif not pullop.remote.capable('changegroupsubset'):
623 raise util.Abort(_("partial pull cannot be done because "
623 raise util.Abort(_("partial pull cannot be done because "
624 "other repository doesn't support "
624 "other repository doesn't support "
625 "changegroupsubset."))
625 "changegroupsubset."))
626 else:
626 else:
627 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
627 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
628 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
628 pullop.cgresult = changegroup.addchangegroup(pullop.repo, cg, 'pull',
629 pullop.remote.url())
629 pullop.remote.url())
630
630
631 def _pullphase(pullop):
631 def _pullphase(pullop):
632 # Get remote phases data from remote
632 # Get remote phases data from remote
633 pullop.todosteps.remove('phases')
633 pullop.todosteps.remove('phases')
634 remotephases = pullop.remote.listkeys('phases')
634 remotephases = pullop.remote.listkeys('phases')
635 publishing = bool(remotephases.get('publishing', False))
635 publishing = bool(remotephases.get('publishing', False))
636 if remotephases and not publishing:
636 if remotephases and not publishing:
637 # remote is new and unpublishing
637 # remote is new and unpublishing
638 pheads, _dr = phases.analyzeremotephases(pullop.repo,
638 pheads, _dr = phases.analyzeremotephases(pullop.repo,
639 pullop.pulledsubset,
639 pullop.pulledsubset,
640 remotephases)
640 remotephases)
641 phases.advanceboundary(pullop.repo, phases.public, pheads)
641 phases.advanceboundary(pullop.repo, phases.public, pheads)
642 phases.advanceboundary(pullop.repo, phases.draft,
642 phases.advanceboundary(pullop.repo, phases.draft,
643 pullop.pulledsubset)
643 pullop.pulledsubset)
644 else:
644 else:
645 # Remote is old or publishing all common changesets
645 # Remote is old or publishing all common changesets
646 # should be seen as public
646 # should be seen as public
647 phases.advanceboundary(pullop.repo, phases.public,
647 phases.advanceboundary(pullop.repo, phases.public,
648 pullop.pulledsubset)
648 pullop.pulledsubset)
649
649
650 def _pullobsolete(pullop):
650 def _pullobsolete(pullop):
651 """utility function to pull obsolete markers from a remote
651 """utility function to pull obsolete markers from a remote
652
652
653 The `gettransaction` function returns the pull transaction, creating
653 The `gettransaction` function returns the pull transaction, creating
654 one if necessary. We return the transaction to inform the calling code that
654 one if necessary. We return the transaction to inform the calling code that
655 a new transaction has been created (when applicable).
655 a new transaction has been created (when applicable).
656
656
657 Exists mostly to allow overriding for experimentation purpose"""
657 Exists mostly to allow overriding for experimentation purpose"""
658 pullop.todosteps.remove('obsmarkers')
658 pullop.todosteps.remove('obsmarkers')
659 tr = None
659 tr = None
660 if obsolete._enabled:
660 if obsolete._enabled:
661 pullop.repo.ui.debug('fetching remote obsolete markers\n')
661 pullop.repo.ui.debug('fetching remote obsolete markers\n')
662 remoteobs = pullop.remote.listkeys('obsolete')
662 remoteobs = pullop.remote.listkeys('obsolete')
663 if 'dump0' in remoteobs:
663 if 'dump0' in remoteobs:
664 tr = pullop.gettransaction()
664 tr = pullop.gettransaction()
665 for key in sorted(remoteobs, reverse=True):
665 for key in sorted(remoteobs, reverse=True):
666 if key.startswith('dump'):
666 if key.startswith('dump'):
667 data = base85.b85decode(remoteobs[key])
667 data = base85.b85decode(remoteobs[key])
668 pullop.repo.obsstore.mergemarkers(tr, data)
668 pullop.repo.obsstore.mergemarkers(tr, data)
669 pullop.repo.invalidatevolatilesets()
669 pullop.repo.invalidatevolatilesets()
670 return tr
670 return tr
671
671
672 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
672 def getbundle(repo, source, heads=None, common=None, bundlecaps=None,
673 **kwargs):
673 **kwargs):
674 """return a full bundle (with potentially multiple kind of parts)
674 """return a full bundle (with potentially multiple kind of parts)
675
675
676 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
676 Could be a bundle HG10 or a bundle HG2X depending on bundlecaps
677 passed. For now, the bundle can contain only changegroup, but this will
677 passed. For now, the bundle can contain only changegroup, but this will
678 change when more part types become available for bundle2.
678 change when more part types become available for bundle2.
679
679
680 This is different from changegroup.getbundle that only returns an HG10
680 This is different from changegroup.getbundle that only returns an HG10
681 changegroup bundle. They may eventually get reunited in the future when we
681 changegroup bundle. They may eventually get reunited in the future when we
682 have a clearer idea of the API we want for querying different data.
682 have a clearer idea of the API we want for querying different data.
683
683
684 The implementation is at a very early stage and will get massive rework
684 The implementation is at a very early stage and will get massive rework
685 when the API of bundle is refined.
685 when the API of bundle is refined.
686 """
686 """
687 # build bundle here.
687 # build bundle here.
688 cg = changegroup.getbundle(repo, source, heads=heads,
688 cg = changegroup.getbundle(repo, source, heads=heads,
689 common=common, bundlecaps=bundlecaps)
689 common=common, bundlecaps=bundlecaps)
690 if bundlecaps is None or 'HG2X' not in bundlecaps:
690 if bundlecaps is None or 'HG2X' not in bundlecaps:
691 return cg
691 return cg
692 # very crude first implementation,
692 # very crude first implementation,
693 # the bundle API will change and the generation will be done lazily.
693 # the bundle API will change and the generation will be done lazily.
694 b2caps = {}
694 b2caps = {}
695 for bcaps in bundlecaps:
695 for bcaps in bundlecaps:
696 if bcaps.startswith('bundle2='):
696 if bcaps.startswith('bundle2='):
697 blob = urllib.unquote(bcaps[len('bundle2='):])
697 blob = urllib.unquote(bcaps[len('bundle2='):])
698 b2caps.update(bundle2.decodecaps(blob))
698 b2caps.update(bundle2.decodecaps(blob))
699 bundler = bundle2.bundle20(repo.ui, b2caps)
699 bundler = bundle2.bundle20(repo.ui, b2caps)
700 part = bundle2.bundlepart('b2x:changegroup', data=cg.getchunks())
700 part = bundle2.bundlepart('b2x:changegroup', data=cg.getchunks())
701 bundler.addpart(part)
701 bundler.addpart(part)
702 _getbundleextrapart(bundler, repo, source, heads=None, common=None,
702 _getbundleextrapart(bundler, repo, source, heads=None, common=None,
703 bundlecaps=None, **kwargs)
703 bundlecaps=None, **kwargs)
704 return util.chunkbuffer(bundler.getchunks())
704 return util.chunkbuffer(bundler.getchunks())
705
705
706 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
706 def _getbundleextrapart(bundler, repo, source, heads=None, common=None,
707 bundlecaps=None, **kwargs):
707 bundlecaps=None, **kwargs):
708 """hook function to let extensions add parts to the requested bundle"""
708 """hook function to let extensions add parts to the requested bundle"""
709 pass
709 pass
710
710
711 def check_heads(repo, their_heads, context):
711 def check_heads(repo, their_heads, context):
712 """check if the heads of a repo have been modified
712 """check if the heads of a repo have been modified
713
713
714 Used by peer for unbundling.
714 Used by peer for unbundling.
715 """
715 """
716 heads = repo.heads()
716 heads = repo.heads()
717 heads_hash = util.sha1(''.join(sorted(heads))).digest()
717 heads_hash = util.sha1(''.join(sorted(heads))).digest()
718 if not (their_heads == ['force'] or their_heads == heads or
718 if not (their_heads == ['force'] or their_heads == heads or
719 their_heads == ['hashed', heads_hash]):
719 their_heads == ['hashed', heads_hash]):
720 # someone else committed/pushed/unbundled while we
720 # someone else committed/pushed/unbundled while we
721 # were transferring data
721 # were transferring data
722 raise error.PushRaced('repository changed while %s - '
722 raise error.PushRaced('repository changed while %s - '
723 'please try again' % context)
723 'please try again' % context)
724
724
725 def unbundle(repo, cg, heads, source, url):
725 def unbundle(repo, cg, heads, source, url):
726 """Apply a bundle to a repo.
726 """Apply a bundle to a repo.
727
727
728 this function makes sure the repo is locked during the application and has a
728 this function makes sure the repo is locked during the application and has a
729 mechanism to check that no push race occurred between the creation of the
729 mechanism to check that no push race occurred between the creation of the
730 bundle and its application.
730 bundle and its application.
731
731
732 If the push was raced, a PushRaced exception is raised.
732 If the push was raced, a PushRaced exception is raised.
733 r = 0
733 r = 0
734 # need a transaction when processing a bundle2 stream
734 # need a transaction when processing a bundle2 stream
735 tr = None
735 tr = None
736 lock = repo.lock()
736 lock = repo.lock()
737 try:
737 try:
738 check_heads(repo, heads, 'uploading changes')
738 check_heads(repo, heads, 'uploading changes')
739 # push can proceed
739 # push can proceed
740 if util.safehasattr(cg, 'params'):
740 if util.safehasattr(cg, 'params'):
741 try:
741 tr = repo.transaction('unbundle')
742 tr = repo.transaction('unbundle')
742 tr.hookargs['bundle2-exp'] = '1'
743 tr.hookargs['bundle2-exp'] = '1'
743 r = bundle2.processbundle(repo, cg, lambda: tr).reply
744 r = bundle2.processbundle(repo, cg, lambda: tr).reply
744 cl = repo.unfiltered().changelog
745 cl = repo.unfiltered().changelog
745 p = cl.writepending() and repo.root or ""
746 p = cl.writepending() and repo.root or ""
746 repo.hook('b2x-pretransactionclose', throw=True, source=source,
747 repo.hook('b2x-pretransactionclose', throw=True, source=source,
747 url=url, pending=p, **tr.hookargs)
748 url=url, pending=p, **tr.hookargs)
748 tr.close()
749 tr.close()
749 repo.hook('b2x-transactionclose', source=source, url=url,
750 repo.hook('b2x-transactionclose', source=source, url=url,
750 **tr.hookargs)
751 **tr.hookargs)
752 except Exception, exc:
753 exc.duringunbundle2 = True
754 raise
751 else:
755 else:
752 r = changegroup.addchangegroup(repo, cg, source, url)
756 r = changegroup.addchangegroup(repo, cg, source, url)
753 finally:
757 finally:
754 if tr is not None:
758 if tr is not None:
755 tr.release()
759 tr.release()
756 lock.release()
760 lock.release()
757 return r
761 return r
@@ -1,1045 +1,1082
1
1
2 Create an extension to test bundle2 API
2 Create an extension to test bundle2 API
3
3
4 $ cat > bundle2.py << EOF
4 $ cat > bundle2.py << EOF
5 > """A small extension to test bundle2 implementation
5 > """A small extension to test bundle2 implementation
6 >
6 >
7 > Current bundle2 implementation is far too limited to be used in any core
7 > Current bundle2 implementation is far too limited to be used in any core
8 > code. We still need to be able to test it while it grow up.
8 > code. We still need to be able to test it while it grows up.
8 > code. We still need to be able to test it while it grows up.
9 > """
10 >
10 >
11 > import sys
11 > import sys
12 > from mercurial import cmdutil
12 > from mercurial import cmdutil
13 > from mercurial import util
13 > from mercurial import util
14 > from mercurial import bundle2
14 > from mercurial import bundle2
15 > from mercurial import scmutil
15 > from mercurial import scmutil
16 > from mercurial import discovery
16 > from mercurial import discovery
17 > from mercurial import changegroup
17 > from mercurial import changegroup
18 > from mercurial import error
18 > from mercurial import error
19 > cmdtable = {}
19 > cmdtable = {}
20 > command = cmdutil.command(cmdtable)
20 > command = cmdutil.command(cmdtable)
21 >
21 >
22 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
22 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
23 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
23 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
24 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
24 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
25 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
25 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
26 >
26 >
27 > @bundle2.parthandler('test:song')
27 > @bundle2.parthandler('test:song')
28 > def songhandler(op, part):
28 > def songhandler(op, part):
29 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
29 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
30 > op.ui.write('The choir starts singing:\n')
30 > op.ui.write('The choir starts singing:\n')
31 > verses = 0
31 > verses = 0
32 > for line in part.read().split('\n'):
32 > for line in part.read().split('\n'):
33 > op.ui.write(' %s\n' % line)
33 > op.ui.write(' %s\n' % line)
34 > verses += 1
34 > verses += 1
35 > op.records.add('song', {'verses': verses})
35 > op.records.add('song', {'verses': verses})
36 >
36 >
37 > @bundle2.parthandler('test:ping')
37 > @bundle2.parthandler('test:ping')
38 > def pinghandler(op, part):
38 > def pinghandler(op, part):
39 > op.ui.write('received ping request (id %i)\n' % part.id)
39 > op.ui.write('received ping request (id %i)\n' % part.id)
40 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
40 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
41 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
41 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
42 > rpart = bundle2.bundlepart('test:pong',
42 > rpart = bundle2.bundlepart('test:pong',
43 > [('in-reply-to', str(part.id))])
43 > [('in-reply-to', str(part.id))])
44 > op.reply.addpart(rpart)
44 > op.reply.addpart(rpart)
45 >
45 >
46 > @bundle2.parthandler('test:debugreply')
46 > @bundle2.parthandler('test:debugreply')
47 > def debugreply(op, part):
47 > def debugreply(op, part):
48 > """print data about the capacity of the bundle reply"""
48 > """print data about the capacity of the bundle reply"""
49 > if op.reply is None:
49 > if op.reply is None:
50 > op.ui.write('debugreply: no reply\n')
50 > op.ui.write('debugreply: no reply\n')
51 > else:
51 > else:
52 > op.ui.write('debugreply: capabilities:\n')
52 > op.ui.write('debugreply: capabilities:\n')
53 > for cap in sorted(op.reply.capabilities):
53 > for cap in sorted(op.reply.capabilities):
54 > op.ui.write('debugreply: %r\n' % cap)
54 > op.ui.write('debugreply: %r\n' % cap)
55 > for val in op.reply.capabilities[cap]:
55 > for val in op.reply.capabilities[cap]:
56 > op.ui.write('debugreply: %r\n' % val)
56 > op.ui.write('debugreply: %r\n' % val)
57 >
57 >
58 > @command('bundle2',
58 > @command('bundle2',
59 > [('', 'param', [], 'stream level parameter'),
59 > [('', 'param', [], 'stream level parameter'),
60 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
60 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
61 > ('', 'parts', False, 'include some arbitrary parts in the bundle'),
61 > ('', 'parts', False, 'include some arbitrary parts in the bundle'),
62 > ('', 'reply', False, 'produce a reply bundle'),
62 > ('', 'reply', False, 'produce a reply bundle'),
63 > ('', 'pushrace', False, 'include a check:heads part with unknown nodes'),
63 > ('', 'pushrace', False, 'include a check:heads part with unknown nodes'),
64 > ('r', 'rev', [], 'include those changesets in the bundle'),],
64 > ('r', 'rev', [], 'include those changesets in the bundle'),],
65 > '[OUTPUTFILE]')
65 > '[OUTPUTFILE]')
66 > def cmdbundle2(ui, repo, path=None, **opts):
66 > def cmdbundle2(ui, repo, path=None, **opts):
67 > """write a bundle2 container on standard ouput"""
67 > """write a bundle2 container on standard ouput"""
68 > bundler = bundle2.bundle20(ui)
68 > bundler = bundle2.bundle20(ui)
69 > for p in opts['param']:
69 > for p in opts['param']:
70 > p = p.split('=', 1)
70 > p = p.split('=', 1)
71 > try:
71 > try:
72 > bundler.addparam(*p)
72 > bundler.addparam(*p)
73 > except ValueError, exc:
73 > except ValueError, exc:
74 > raise util.Abort('%s' % exc)
74 > raise util.Abort('%s' % exc)
75 >
75 >
76 > if opts['reply']:
76 > if opts['reply']:
77 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
77 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
78 > bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsstring))
78 > bundler.addpart(bundle2.bundlepart('b2x:replycaps', data=capsstring))
79 >
79 >
80 > if opts['pushrace']:
80 > if opts['pushrace']:
81 > dummynode = '01234567890123456789'
81 > dummynode = '01234567890123456789'
82 > bundler.addpart(bundle2.bundlepart('b2x:check:heads', data=dummynode))
82 > bundler.addpart(bundle2.bundlepart('b2x:check:heads', data=dummynode))
83 >
83 >
84 > revs = opts['rev']
84 > revs = opts['rev']
85 > if 'rev' in opts:
85 > if 'rev' in opts:
86 > revs = scmutil.revrange(repo, opts['rev'])
86 > revs = scmutil.revrange(repo, opts['rev'])
87 > if revs:
87 > if revs:
88 > # very crude version of a changegroup part creation
88 > # very crude version of a changegroup part creation
89 > bundled = repo.revs('%ld::%ld', revs, revs)
89 > bundled = repo.revs('%ld::%ld', revs, revs)
90 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
90 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
91 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
91 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
92 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
92 > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing)
93 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
93 > cg = changegroup.getlocalbundle(repo, 'test:bundle2', outgoing, None)
94 > part = bundle2.bundlepart('b2x:changegroup', data=cg.getchunks())
94 > part = bundle2.bundlepart('b2x:changegroup', data=cg.getchunks())
95 > bundler.addpart(part)
95 > bundler.addpart(part)
96 >
96 >
97 > if opts['parts']:
97 > if opts['parts']:
98 > part = bundle2.bundlepart('test:empty')
98 > part = bundle2.bundlepart('test:empty')
99 > bundler.addpart(part)
99 > bundler.addpart(part)
100 > # add a second one to make sure we handle multiple parts
100 > # add a second one to make sure we handle multiple parts
101 > part = bundle2.bundlepart('test:empty')
101 > part = bundle2.bundlepart('test:empty')
102 > bundler.addpart(part)
102 > bundler.addpart(part)
103 > part = bundle2.bundlepart('test:song', data=ELEPHANTSSONG)
103 > part = bundle2.bundlepart('test:song', data=ELEPHANTSSONG)
104 > bundler.addpart(part)
104 > bundler.addpart(part)
105 > part = bundle2.bundlepart('test:debugreply')
105 > part = bundle2.bundlepart('test:debugreply')
106 > bundler.addpart(part)
106 > bundler.addpart(part)
107 > part = bundle2.bundlepart('test:math',
107 > part = bundle2.bundlepart('test:math',
108 > [('pi', '3.14'), ('e', '2.72')],
108 > [('pi', '3.14'), ('e', '2.72')],
109 > [('cooking', 'raw')],
109 > [('cooking', 'raw')],
110 > '42')
110 > '42')
111 > bundler.addpart(part)
111 > bundler.addpart(part)
112 > if opts['unknown']:
112 > if opts['unknown']:
113 > part = bundle2.bundlepart('test:UNKNOWN',
113 > part = bundle2.bundlepart('test:UNKNOWN',
114 > data='some random content')
114 > data='some random content')
115 > bundler.addpart(part)
115 > bundler.addpart(part)
116 > if opts['parts']:
116 > if opts['parts']:
117 > part = bundle2.bundlepart('test:ping')
117 > part = bundle2.bundlepart('test:ping')
118 > bundler.addpart(part)
118 > bundler.addpart(part)
119 >
119 >
120 > if path is None:
120 > if path is None:
121 > file = sys.stdout
121 > file = sys.stdout
122 > else:
122 > else:
123 > file = open(path, 'w')
123 > file = open(path, 'w')
124 >
124 >
125 > for chunk in bundler.getchunks():
125 > for chunk in bundler.getchunks():
126 > file.write(chunk)
126 > file.write(chunk)
127 >
127 >
128 > @command('unbundle2', [], '')
128 > @command('unbundle2', [], '')
129 > def cmdunbundle2(ui, repo, replypath=None):
129 > def cmdunbundle2(ui, repo, replypath=None):
130 > """process a bundle2 stream from stdin on the current repo"""
130 > """process a bundle2 stream from stdin on the current repo"""
131 > try:
131 > try:
132 > tr = None
132 > tr = None
133 > lock = repo.lock()
133 > lock = repo.lock()
134 > tr = repo.transaction('processbundle')
134 > tr = repo.transaction('processbundle')
135 > try:
135 > try:
136 > unbundler = bundle2.unbundle20(ui, sys.stdin)
136 > unbundler = bundle2.unbundle20(ui, sys.stdin)
137 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
137 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
138 > tr.close()
138 > tr.close()
139 > except KeyError, exc:
139 > except KeyError, exc:
140 > raise util.Abort('missing support for %s' % exc)
140 > raise util.Abort('missing support for %s' % exc)
141 > except error.PushRaced, exc:
141 > except error.PushRaced, exc:
142 > raise util.Abort('push race: %s' % exc)
142 > raise util.Abort('push race: %s' % exc)
143 > finally:
143 > finally:
144 > if tr is not None:
144 > if tr is not None:
145 > tr.release()
145 > tr.release()
146 > lock.release()
146 > lock.release()
147 > remains = sys.stdin.read()
147 > remains = sys.stdin.read()
148 > ui.write('%i unread bytes\n' % len(remains))
148 > ui.write('%i unread bytes\n' % len(remains))
149 > if op.records['song']:
149 > if op.records['song']:
150 > totalverses = sum(r['verses'] for r in op.records['song'])
150 > totalverses = sum(r['verses'] for r in op.records['song'])
151 > ui.write('%i total verses sung\n' % totalverses)
151 > ui.write('%i total verses sung\n' % totalverses)
152 > for rec in op.records['changegroup']:
152 > for rec in op.records['changegroup']:
153 > ui.write('addchangegroup return: %i\n' % rec['return'])
153 > ui.write('addchangegroup return: %i\n' % rec['return'])
154 > if op.reply is not None and replypath is not None:
154 > if op.reply is not None and replypath is not None:
155 > file = open(replypath, 'w')
155 > file = open(replypath, 'w')
156 > for chunk in op.reply.getchunks():
156 > for chunk in op.reply.getchunks():
157 > file.write(chunk)
157 > file.write(chunk)
158 >
158 >
159 > @command('statbundle2', [], '')
159 > @command('statbundle2', [], '')
160 > def cmdstatbundle2(ui, repo):
160 > def cmdstatbundle2(ui, repo):
161 > """print statistic on the bundle2 container read from stdin"""
161 > """print statistic on the bundle2 container read from stdin"""
162 > unbundler = bundle2.unbundle20(ui, sys.stdin)
162 > unbundler = bundle2.unbundle20(ui, sys.stdin)
163 > try:
163 > try:
164 > params = unbundler.params
164 > params = unbundler.params
165 > except KeyError, exc:
165 > except KeyError, exc:
166 > raise util.Abort('unknown parameters: %s' % exc)
166 > raise util.Abort('unknown parameters: %s' % exc)
167 > ui.write('options count: %i\n' % len(params))
167 > ui.write('options count: %i\n' % len(params))
168 > for key in sorted(params):
168 > for key in sorted(params):
169 > ui.write('- %s\n' % key)
169 > ui.write('- %s\n' % key)
170 > value = params[key]
170 > value = params[key]
171 > if value is not None:
171 > if value is not None:
172 > ui.write(' %s\n' % value)
172 > ui.write(' %s\n' % value)
173 > count = 0
173 > count = 0
174 > for p in unbundler.iterparts():
174 > for p in unbundler.iterparts():
175 > count += 1
175 > count += 1
176 > ui.write(' :%s:\n' % p.type)
176 > ui.write(' :%s:\n' % p.type)
177 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
177 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
178 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
178 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
179 > ui.write(' payload: %i bytes\n' % len(p.read()))
179 > ui.write(' payload: %i bytes\n' % len(p.read()))
180 > ui.write('parts count: %i\n' % count)
180 > ui.write('parts count: %i\n' % count)
181 > EOF
181 > EOF
182 $ cat >> $HGRCPATH << EOF
182 $ cat >> $HGRCPATH << EOF
183 > [extensions]
183 > [extensions]
184 > bundle2=$TESTTMP/bundle2.py
184 > bundle2=$TESTTMP/bundle2.py
185 > [experimental]
185 > [experimental]
186 > bundle2-exp=True
186 > bundle2-exp=True
187 > [ui]
187 > [ui]
188 > ssh=python "$TESTDIR/dummyssh"
188 > ssh=python "$TESTDIR/dummyssh"
189 > [web]
189 > [web]
190 > push_ssl = false
190 > push_ssl = false
191 > allow_push = *
191 > allow_push = *
192 > EOF
192 > EOF
193
193
194 The extension requires a repo (currently unused)
194 The extension requires a repo (currently unused)
195
195
196 $ hg init main
196 $ hg init main
197 $ cd main
197 $ cd main
198 $ touch a
198 $ touch a
199 $ hg add a
199 $ hg add a
200 $ hg commit -m 'a'
200 $ hg commit -m 'a'
201
201
202
202
203 Empty bundle
203 Empty bundle
204 =================
204 =================
205
205
206 - no option
206 - no option
207 - no parts
207 - no parts
208
208
209 Test bundling
209 Test bundling
210
210
211 $ hg bundle2
211 $ hg bundle2
212 HG2X\x00\x00\x00\x00 (no-eol) (esc)
212 HG2X\x00\x00\x00\x00 (no-eol) (esc)
213
213
214 Test unbundling
214 Test unbundling
215
215
216 $ hg bundle2 | hg statbundle2
216 $ hg bundle2 | hg statbundle2
217 options count: 0
217 options count: 0
218 parts count: 0
218 parts count: 0
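
The HG2X\x00\x00\x00\x00 stream bundled above is the whole story for an empty bundle: the 4-byte 'HG2X' magic, a big-endian size for the stream-parameter block (zero here), then a zero part-header size that terminates the part sequence. A rough reader for that framing, with the two-byte size fields inferred from the escaped dumps in this test rather than from a format reference:

    import struct

    def read_empty_hg2x(data):
        # 4-byte magic, then a 2-byte big-endian parameter block size
        assert data[:4] == b'HG2X'
        (paramssize,) = struct.unpack('>H', data[4:6])
        offset = 6 + paramssize          # skip the parameter block
        (headersize,) = struct.unpack('>H', data[offset:offset + 2])
        # a zero header size means "no more parts"
        return paramssize, headersize

    # read_empty_hg2x(b'HG2X\x00\x00\x00\x00') -> (0, 0)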
219
219
220 Test that old-style bundles are detected and refused
220 Test that old-style bundles are detected and refused
221
221
222 $ hg bundle --all ../bundle.hg
222 $ hg bundle --all ../bundle.hg
223 1 changesets found
223 1 changesets found
224 $ hg statbundle2 < ../bundle.hg
224 $ hg statbundle2 < ../bundle.hg
225 abort: unknown bundle version 10
225 abort: unknown bundle version 10
226 [255]
226 [255]
227
227
228 Test parameters
228 Test parameters
229 =================
229 =================
230
230
231 - some options
231 - some options
232 - no parts
232 - no parts
233
233
234 advisory parameters, no value
234 advisory parameters, no value
235 -------------------------------
235 -------------------------------
236
236
237 Simplest possible parameters form
237 Simplest possible parameters form
238
238
239 Test generation of a simple option
239 Test generation of a simple option
240
240
241 $ hg bundle2 --param 'caution'
241 $ hg bundle2 --param 'caution'
242 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
242 HG2X\x00\x07caution\x00\x00 (no-eol) (esc)
243
243
244 Test unbundling
244 Test unbundling
245
245
246 $ hg bundle2 --param 'caution' | hg statbundle2
246 $ hg bundle2 --param 'caution' | hg statbundle2
247 options count: 1
247 options count: 1
248 - caution
248 - caution
249 parts count: 0
249 parts count: 0
250
250
251 Test generation of multiple options
251 Test generation of multiple options
252
252
253 $ hg bundle2 --param 'caution' --param 'meal'
253 $ hg bundle2 --param 'caution' --param 'meal'
254 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
254 HG2X\x00\x0ccaution meal\x00\x00 (no-eol) (esc)
255
255
256 Test unbundling
256 Test unbundling
257
257
258 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
258 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
259 options count: 2
259 options count: 2
260 - caution
260 - caution
261 - meal
261 - meal
262 parts count: 0
262 parts count: 0
263
263
264 advisory parameters, with value
264 advisory parameters, with value
265 -------------------------------
265 -------------------------------
266
266
267 Test generation
267 Test generation
268
268
269 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
269 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants'
270 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
270 HG2X\x00\x1ccaution meal=vegan elephants\x00\x00 (no-eol) (esc)
271
271
272 Test unbundling
272 Test unbundling
273
273
274 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
274 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
275 options count: 3
275 options count: 3
276 - caution
276 - caution
277 - elephants
277 - elephants
278 - meal
278 - meal
279 vegan
279 vegan
280 parts count: 0
280 parts count: 0
281
281
282 parameter with special char in value
282 parameter with special char in value
283 ---------------------------------------------------
283 ---------------------------------------------------
284
284
285 Test generation
285 Test generation
286
286
287 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
287 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple
288 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
288 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
289
289
290 Test unbundling
290 Test unbundling
291
291
292 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
292 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
293 options count: 2
293 options count: 2
294 - e|! 7/
294 - e|! 7/
295 babar%#==tutu
295 babar%#==tutu
296 - simple
296 - simple
297 parts count: 0
297 parts count: 0
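
Parameter names and values are percent-encoded on the wire and decoded again on the receiving side, which is why 'e|! 7/' and 'babar%#==tutu' round-trip intact. A small sketch of that encoding, assuming urllib-style quoting (an assumption, but it matches the escaped stream above, where '|', '!', space, '%', '#' and '=' are escaped while '/' is left alone):

    try:                      # Python 2 / Python 3 compatibility
        from urllib import quote, unquote
    except ImportError:
        from urllib.parse import quote, unquote

    def encodeparam(name, value=None):
        # '=' separates the name from the value, so both sides escape it
        param = quote(name)
        if value is not None:
            param += '=' + quote(value)
        return param

    # encodeparam('e|! 7/', 'babar%#==tutu')
    # -> 'e%7C%21%207/=babar%25%23%3D%3Dtutu'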
298
298
299 Test unknown mandatory option
299 Test unknown mandatory option
300 ---------------------------------------------------
300 ---------------------------------------------------
301
301
302 $ hg bundle2 --param 'Gravity' | hg statbundle2
302 $ hg bundle2 --param 'Gravity' | hg statbundle2
303 abort: unknown parameters: 'Gravity'
303 abort: unknown parameters: 'Gravity'
304 [255]
304 [255]
305
305
306 Test debug output
306 Test debug output
307 ---------------------------------------------------
307 ---------------------------------------------------
308
308
309 bundling debug
309 bundling debug
310
310
311 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
311 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2
312 start emission of HG2X stream
312 start emission of HG2X stream
313 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
313 bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
314 start of parts
314 start of parts
315 end of bundle
315 end of bundle
316
316
317 file content is ok
317 file content is ok
318
318
319 $ cat ../out.hg2
319 $ cat ../out.hg2
320 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
320 HG2X\x00)e%7C%21%207/=babar%25%23%3D%3Dtutu simple\x00\x00 (no-eol) (esc)
321
321
322 unbundling debug
322 unbundling debug
323
323
324 $ hg statbundle2 --debug < ../out.hg2
324 $ hg statbundle2 --debug < ../out.hg2
325 start processing of HG2X stream
325 start processing of HG2X stream
326 reading bundle2 stream parameters
326 reading bundle2 stream parameters
327 ignoring unknown parameter 'e|! 7/'
327 ignoring unknown parameter 'e|! 7/'
328 ignoring unknown parameter 'simple'
328 ignoring unknown parameter 'simple'
329 options count: 2
329 options count: 2
330 - e|! 7/
330 - e|! 7/
331 babar%#==tutu
331 babar%#==tutu
332 - simple
332 - simple
333 start extraction of bundle2 parts
333 start extraction of bundle2 parts
334 part header size: 0
334 part header size: 0
335 end of bundle2 stream
335 end of bundle2 stream
336 parts count: 0
336 parts count: 0
337
337
338
338
339 Test buggy input
339 Test buggy input
340 ---------------------------------------------------
340 ---------------------------------------------------
341
341
342 empty parameter name
342 empty parameter name
343
343
344 $ hg bundle2 --param '' --quiet
344 $ hg bundle2 --param '' --quiet
345 abort: empty parameter name
345 abort: empty parameter name
346 [255]
346 [255]
347
347
348 bad parameter name
348 bad parameter name
349
349
350 $ hg bundle2 --param 42babar
350 $ hg bundle2 --param 42babar
351 abort: non letter first character: '42babar'
351 abort: non letter first character: '42babar'
352 [255]
352 [255]
353
353
354
354
355 Test part
355 Test part
356 =================
356 =================
357
357
358 $ hg bundle2 --parts ../parts.hg2 --debug
358 $ hg bundle2 --parts ../parts.hg2 --debug
359 start emission of HG2X stream
359 start emission of HG2X stream
360 bundle parameter:
360 bundle parameter:
361 start of parts
361 start of parts
362 bundle part: "test:empty"
362 bundle part: "test:empty"
363 bundle part: "test:empty"
363 bundle part: "test:empty"
364 bundle part: "test:song"
364 bundle part: "test:song"
365 bundle part: "test:debugreply"
365 bundle part: "test:debugreply"
366 bundle part: "test:math"
366 bundle part: "test:math"
367 bundle part: "test:ping"
367 bundle part: "test:ping"
368 end of bundle
368 end of bundle
369
369
370 $ cat ../parts.hg2
370 $ cat ../parts.hg2
371 HG2X\x00\x00\x00\x11 (esc)
371 HG2X\x00\x00\x00\x11 (esc)
372 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
372 test:empty\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11 (esc)
373 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
373 test:empty\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x10 test:song\x00\x00\x00\x02\x00\x00\x00\x00\x00\xb2Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko (esc)
374 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
374 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
375 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
375 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.\x00\x00\x00\x00\x00\x16\x0ftest:debugreply\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00+ test:math\x00\x00\x00\x04\x02\x01\x02\x04\x01\x04\x07\x03pi3.14e2.72cookingraw\x00\x00\x00\x0242\x00\x00\x00\x00\x00\x10 test:ping\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
376
376
377
377
378 $ hg statbundle2 < ../parts.hg2
378 $ hg statbundle2 < ../parts.hg2
379 options count: 0
379 options count: 0
380 :test:empty:
380 :test:empty:
381 mandatory: 0
381 mandatory: 0
382 advisory: 0
382 advisory: 0
383 payload: 0 bytes
383 payload: 0 bytes
384 :test:empty:
384 :test:empty:
385 mandatory: 0
385 mandatory: 0
386 advisory: 0
386 advisory: 0
387 payload: 0 bytes
387 payload: 0 bytes
388 :test:song:
388 :test:song:
389 mandatory: 0
389 mandatory: 0
390 advisory: 0
390 advisory: 0
391 payload: 178 bytes
391 payload: 178 bytes
392 :test:debugreply:
392 :test:debugreply:
393 mandatory: 0
393 mandatory: 0
394 advisory: 0
394 advisory: 0
395 payload: 0 bytes
395 payload: 0 bytes
396 :test:math:
396 :test:math:
397 mandatory: 2
397 mandatory: 2
398 advisory: 1
398 advisory: 1
399 payload: 2 bytes
399 payload: 2 bytes
400 :test:ping:
400 :test:ping:
401 mandatory: 0
401 mandatory: 0
402 advisory: 0
402 advisory: 0
403 payload: 0 bytes
403 payload: 0 bytes
404 parts count: 6
404 parts count: 6
405
405
406 $ hg statbundle2 --debug < ../parts.hg2
406 $ hg statbundle2 --debug < ../parts.hg2
407 start processing of HG2X stream
407 start processing of HG2X stream
408 reading bundle2 stream parameters
408 reading bundle2 stream parameters
409 options count: 0
409 options count: 0
410 start extraction of bundle2 parts
410 start extraction of bundle2 parts
411 part header size: 17
411 part header size: 17
412 part type: "test:empty"
412 part type: "test:empty"
413 part id: "0"
413 part id: "0"
414 part parameters: 0
414 part parameters: 0
415 :test:empty:
415 :test:empty:
416 mandatory: 0
416 mandatory: 0
417 advisory: 0
417 advisory: 0
418 payload chunk size: 0
418 payload chunk size: 0
419 payload: 0 bytes
419 payload: 0 bytes
420 part header size: 17
420 part header size: 17
421 part type: "test:empty"
421 part type: "test:empty"
422 part id: "1"
422 part id: "1"
423 part parameters: 0
423 part parameters: 0
424 :test:empty:
424 :test:empty:
425 mandatory: 0
425 mandatory: 0
426 advisory: 0
426 advisory: 0
427 payload chunk size: 0
427 payload chunk size: 0
428 payload: 0 bytes
428 payload: 0 bytes
429 part header size: 16
429 part header size: 16
430 part type: "test:song"
430 part type: "test:song"
431 part id: "2"
431 part id: "2"
432 part parameters: 0
432 part parameters: 0
433 :test:song:
433 :test:song:
434 mandatory: 0
434 mandatory: 0
435 advisory: 0
435 advisory: 0
436 payload chunk size: 178
436 payload chunk size: 178
437 payload chunk size: 0
437 payload chunk size: 0
438 payload: 178 bytes
438 payload: 178 bytes
439 part header size: 22
439 part header size: 22
440 part type: "test:debugreply"
440 part type: "test:debugreply"
441 part id: "3"
441 part id: "3"
442 part parameters: 0
442 part parameters: 0
443 :test:debugreply:
443 :test:debugreply:
444 mandatory: 0
444 mandatory: 0
445 advisory: 0
445 advisory: 0
446 payload chunk size: 0
446 payload chunk size: 0
447 payload: 0 bytes
447 payload: 0 bytes
448 part header size: 43
448 part header size: 43
449 part type: "test:math"
449 part type: "test:math"
450 part id: "4"
450 part id: "4"
451 part parameters: 3
451 part parameters: 3
452 :test:math:
452 :test:math:
453 mandatory: 2
453 mandatory: 2
454 advisory: 1
454 advisory: 1
455 payload chunk size: 2
455 payload chunk size: 2
456 payload chunk size: 0
456 payload chunk size: 0
457 payload: 2 bytes
457 payload: 2 bytes
458 part header size: 16
458 part header size: 16
459 part type: "test:ping"
459 part type: "test:ping"
460 part id: "5"
460 part id: "5"
461 part parameters: 0
461 part parameters: 0
462 :test:ping:
462 :test:ping:
463 mandatory: 0
463 mandatory: 0
464 advisory: 0
464 advisory: 0
465 payload chunk size: 0
465 payload chunk size: 0
466 payload: 0 bytes
466 payload: 0 bytes
467 part header size: 0
467 part header size: 0
468 end of bundle2 stream
468 end of bundle2 stream
469 parts count: 6
469 parts count: 6
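
The debug run above spells out the framing of each part: a size-prefixed header carrying the type, a 32-bit part id and the mandatory/advisory parameter counts, followed by size-prefixed payload chunks that end with an empty chunk. A rough decoder for one part header, reconstructed from the escaped dump of ../parts.hg2 above (the field widths are inferred from that dump, so treat this as an illustration rather than a format reference):

    import struct

    def readpartheader(data, offset):
        # two-byte big-endian header size; zero marks the end of the parts
        (headersize,) = struct.unpack('>H', data[offset:offset + 2])
        offset += 2
        if headersize == 0:
            return None
        header = data[offset:offset + headersize]
        typelen = ord(header[0:1])                 # one-byte part type length
        parttype = header[1:1 + typelen]
        pos = 1 + typelen
        (partid,) = struct.unpack('>i', header[pos:pos + 4])   # 32-bit part id
        pos += 4
        mancount = ord(header[pos:pos + 1])        # mandatory parameter count
        advcount = ord(header[pos + 1:pos + 2])    # advisory parameter count
        # the parameter size table, the parameters themselves and the
        # size-prefixed payload chunks (ended by an empty chunk) follow
        return {'type': parttype, 'id': partid,
                'mandatory': mancount, 'advisory': advcount}

    # e.g. readpartheader(stream, 6) decodes the first part of a stream
    # whose parameter block is empty (4-byte magic + 2-byte zero size)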
470
470
471 Test actual unbundling of test parts
471 Test actual unbundling of test parts
472 =======================================
472 =======================================
473
473
474 Process the bundle
474 Process the bundle
475
475
476 $ hg unbundle2 --debug < ../parts.hg2
476 $ hg unbundle2 --debug < ../parts.hg2
477 start processing of HG2X stream
477 start processing of HG2X stream
478 reading bundle2 stream parameters
478 reading bundle2 stream parameters
479 start extraction of bundle2 parts
479 start extraction of bundle2 parts
480 part header size: 17
480 part header size: 17
481 part type: "test:empty"
481 part type: "test:empty"
482 part id: "0"
482 part id: "0"
483 part parameters: 0
483 part parameters: 0
484 ignoring unknown advisory part 'test:empty'
484 ignoring unknown advisory part 'test:empty'
485 payload chunk size: 0
485 payload chunk size: 0
486 part header size: 17
486 part header size: 17
487 part type: "test:empty"
487 part type: "test:empty"
488 part id: "1"
488 part id: "1"
489 part parameters: 0
489 part parameters: 0
490 ignoring unknown advisory part 'test:empty'
490 ignoring unknown advisory part 'test:empty'
491 payload chunk size: 0
491 payload chunk size: 0
492 part header size: 16
492 part header size: 16
493 part type: "test:song"
493 part type: "test:song"
494 part id: "2"
494 part id: "2"
495 part parameters: 0
495 part parameters: 0
496 found a handler for part 'test:song'
496 found a handler for part 'test:song'
497 The choir starts singing:
497 The choir starts singing:
498 payload chunk size: 178
498 payload chunk size: 178
499 payload chunk size: 0
499 payload chunk size: 0
500 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
500 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
501 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
501 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
502 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
502 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
503 part header size: 22
503 part header size: 22
504 part type: "test:debugreply"
504 part type: "test:debugreply"
505 part id: "3"
505 part id: "3"
506 part parameters: 0
506 part parameters: 0
507 found a handler for part 'test:debugreply'
507 found a handler for part 'test:debugreply'
508 debugreply: no reply
508 debugreply: no reply
509 payload chunk size: 0
509 payload chunk size: 0
510 part header size: 43
510 part header size: 43
511 part type: "test:math"
511 part type: "test:math"
512 part id: "4"
512 part id: "4"
513 part parameters: 3
513 part parameters: 3
514 ignoring unknown advisory part 'test:math'
514 ignoring unknown advisory part 'test:math'
515 payload chunk size: 2
515 payload chunk size: 2
516 payload chunk size: 0
516 payload chunk size: 0
517 part header size: 16
517 part header size: 16
518 part type: "test:ping"
518 part type: "test:ping"
519 part id: "5"
519 part id: "5"
520 part parameters: 0
520 part parameters: 0
521 found a handler for part 'test:ping'
521 found a handler for part 'test:ping'
522 received ping request (id 5)
522 received ping request (id 5)
523 payload chunk size: 0
523 payload chunk size: 0
524 part header size: 0
524 part header size: 0
525 end of bundle2 stream
525 end of bundle2 stream
526 0 unread bytes
526 0 unread bytes
527 3 total verses sung
527 3 total verses sung
528
528
529 Unbundle with an unknown mandatory part
529 Unbundle with an unknown mandatory part
530 (should abort)
530 (should abort)
531
531
532 $ hg bundle2 --parts --unknown ../unknown.hg2
532 $ hg bundle2 --parts --unknown ../unknown.hg2
533
533
534 $ hg unbundle2 < ../unknown.hg2
534 $ hg unbundle2 < ../unknown.hg2
535 The choir starts singing:
535 The choir starts singing:
536 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
536 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
537 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
537 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
538 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
538 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
539 debugreply: no reply
539 debugreply: no reply
540 0 unread bytes
540 0 unread bytes
541 abort: missing support for 'test:unknown'
541 abort: missing support for 'test:unknown'
542 [255]
542 [255]
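
Note the contrast with the earlier run: unknown advisory parts ('test:empty', 'test:math') were silently ignored, while the unknown 'test:UNKNOWN' part aborts the whole unbundle. The distinction is carried by the capitalisation of the part type; a one-line sketch of that convention as the output above suggests it (an inference from the test, not a quote of the implementation):

    def ismandatory(parttype):
        # a part whose type contains an upper-case letter must be understood
        # by the receiver; lower-case-only part types are advisory
        return parttype != parttype.lower()

    # ismandatory('test:UNKNOWN') -> True   (abort if unhandled)
    # ismandatory('test:math')    -> False  (ignored if unhandled)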
543
543
544 unbundle with a reply
544 unbundle with a reply
545
545
546 $ hg bundle2 --parts --reply ../parts-reply.hg2
546 $ hg bundle2 --parts --reply ../parts-reply.hg2
547 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
547 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
548 0 unread bytes
548 0 unread bytes
549 3 total verses sung
549 3 total verses sung
550
550
551 The reply is a bundle
551 The reply is a bundle
552
552
553 $ cat ../reply.hg2
553 $ cat ../reply.hg2
554 HG2X\x00\x00\x00\x1f (esc)
554 HG2X\x00\x00\x00\x1f (esc)
555 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
555 b2x:output\x00\x00\x00\x00\x00\x01\x0b\x01in-reply-to3\x00\x00\x00\xd9The choir starts singing: (esc)
556 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
556 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
557 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
557 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
558 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
558 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
559 \x00\x00\x00\x00\x00\x1f (esc)
559 \x00\x00\x00\x00\x00\x1f (esc)
560 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
560 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to4\x00\x00\x00\xc9debugreply: capabilities: (esc)
561 debugreply: 'city=!'
561 debugreply: 'city=!'
562 debugreply: 'celeste,ville'
562 debugreply: 'celeste,ville'
563 debugreply: 'elephants'
563 debugreply: 'elephants'
564 debugreply: 'babar'
564 debugreply: 'babar'
565 debugreply: 'celeste'
565 debugreply: 'celeste'
566 debugreply: 'ping-pong'
566 debugreply: 'ping-pong'
567 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to6\x00\x00\x00\x00\x00\x1f (esc)
567 \x00\x00\x00\x00\x00\x1e test:pong\x00\x00\x00\x02\x01\x00\x0b\x01in-reply-to6\x00\x00\x00\x00\x00\x1f (esc)
568 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to6\x00\x00\x00=received ping request (id 6) (esc)
568 b2x:output\x00\x00\x00\x03\x00\x01\x0b\x01in-reply-to6\x00\x00\x00=received ping request (id 6) (esc)
569 replying to ping request (id 6)
569 replying to ping request (id 6)
570 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
570 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
571
571
572 The reply is valid
572 The reply is valid
573
573
574 $ hg statbundle2 < ../reply.hg2
574 $ hg statbundle2 < ../reply.hg2
575 options count: 0
575 options count: 0
576 :b2x:output:
576 :b2x:output:
577 mandatory: 0
577 mandatory: 0
578 advisory: 1
578 advisory: 1
579 payload: 217 bytes
579 payload: 217 bytes
580 :b2x:output:
580 :b2x:output:
581 mandatory: 0
581 mandatory: 0
582 advisory: 1
582 advisory: 1
583 payload: 201 bytes
583 payload: 201 bytes
584 :test:pong:
584 :test:pong:
585 mandatory: 1
585 mandatory: 1
586 advisory: 0
586 advisory: 0
587 payload: 0 bytes
587 payload: 0 bytes
588 :b2x:output:
588 :b2x:output:
589 mandatory: 0
589 mandatory: 0
590 advisory: 1
590 advisory: 1
591 payload: 61 bytes
591 payload: 61 bytes
592 parts count: 4
592 parts count: 4
593
593
594 Unbundle the reply to get the output:
594 Unbundle the reply to get the output:
595
595
596 $ hg unbundle2 < ../reply.hg2
596 $ hg unbundle2 < ../reply.hg2
597 remote: The choir starts singing:
597 remote: The choir starts singing:
598 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
598 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
599 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
599 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
600 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
600 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
601 remote: debugreply: capabilities:
601 remote: debugreply: capabilities:
602 remote: debugreply: 'city=!'
602 remote: debugreply: 'city=!'
603 remote: debugreply: 'celeste,ville'
603 remote: debugreply: 'celeste,ville'
604 remote: debugreply: 'elephants'
604 remote: debugreply: 'elephants'
605 remote: debugreply: 'babar'
605 remote: debugreply: 'babar'
606 remote: debugreply: 'celeste'
606 remote: debugreply: 'celeste'
607 remote: debugreply: 'ping-pong'
607 remote: debugreply: 'ping-pong'
608 remote: received ping request (id 6)
608 remote: received ping request (id 6)
609 remote: replying to ping request (id 6)
609 remote: replying to ping request (id 6)
610 0 unread bytes
610 0 unread bytes
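
Each 'b2x:output' part simply carries the text the original handlers wrote; when the reply is processed locally that text is replayed with a 'remote: ' prefix, which is exactly what the lines above show. A minimal sketch of that replay step (an illustration, not the actual handler):

    def replayoutput(payload, write):
        """Re-emit a b2x:output payload, one 'remote: ' prefixed line at a time."""
        for line in payload.splitlines():
            write('remote: %s\n' % line)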
611
611
612 Test push race detection
612 Test push race detection
613
613
614 $ hg bundle2 --pushrace ../part-race.hg2
614 $ hg bundle2 --pushrace ../part-race.hg2
615
615
616 $ hg unbundle2 < ../part-race.hg2
616 $ hg unbundle2 < ../part-race.hg2
617 0 unread bytes
617 0 unread bytes
618 abort: push race: repository changed while pushing - please try again
618 abort: push race: repository changed while pushing - please try again
619 [255]
619 [255]
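
The 'b2x:check:heads' part carries the heads the pusher based its push on; the receiving side compares them with its current heads and raises a push-race error when they differ, which is what the dummy 20-byte node above triggers. A sketch of that check, with RuntimeError standing in for Mercurial's PushRaced error:

    def checkheads(expected, actual):
        """Verify the remote repo still has the heads the push was computed against."""
        if set(expected) != set(actual):
            raise RuntimeError('repository changed while pushing - '
                               'please try again')

    # raises when the snapshot of heads taken at push time no longer
    # matches the receiving repository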
620
620
621 Support for changegroup
621 Support for changegroup
622 ===================================
622 ===================================
623
623
624 $ hg unbundle $TESTDIR/bundles/rebase.hg
624 $ hg unbundle $TESTDIR/bundles/rebase.hg
625 adding changesets
625 adding changesets
626 adding manifests
626 adding manifests
627 adding file changes
627 adding file changes
628 added 8 changesets with 7 changes to 7 files (+3 heads)
628 added 8 changesets with 7 changes to 7 files (+3 heads)
629 (run 'hg heads' to see heads, 'hg merge' to merge)
629 (run 'hg heads' to see heads, 'hg merge' to merge)
630
630
631 $ hg log -G
631 $ hg log -G
632 o changeset: 8:02de42196ebe
632 o changeset: 8:02de42196ebe
633 | tag: tip
633 | tag: tip
634 | parent: 6:24b6387c8c8c
634 | parent: 6:24b6387c8c8c
635 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
635 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
636 | date: Sat Apr 30 15:24:48 2011 +0200
636 | date: Sat Apr 30 15:24:48 2011 +0200
637 | summary: H
637 | summary: H
638 |
638 |
639 | o changeset: 7:eea13746799a
639 | o changeset: 7:eea13746799a
640 |/| parent: 6:24b6387c8c8c
640 |/| parent: 6:24b6387c8c8c
641 | | parent: 5:9520eea781bc
641 | | parent: 5:9520eea781bc
642 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
642 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
643 | | date: Sat Apr 30 15:24:48 2011 +0200
643 | | date: Sat Apr 30 15:24:48 2011 +0200
644 | | summary: G
644 | | summary: G
645 | |
645 | |
646 o | changeset: 6:24b6387c8c8c
646 o | changeset: 6:24b6387c8c8c
647 | | parent: 1:cd010b8cd998
647 | | parent: 1:cd010b8cd998
648 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
648 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
649 | | date: Sat Apr 30 15:24:48 2011 +0200
649 | | date: Sat Apr 30 15:24:48 2011 +0200
650 | | summary: F
650 | | summary: F
651 | |
651 | |
652 | o changeset: 5:9520eea781bc
652 | o changeset: 5:9520eea781bc
653 |/ parent: 1:cd010b8cd998
653 |/ parent: 1:cd010b8cd998
654 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
654 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
655 | date: Sat Apr 30 15:24:48 2011 +0200
655 | date: Sat Apr 30 15:24:48 2011 +0200
656 | summary: E
656 | summary: E
657 |
657 |
658 | o changeset: 4:32af7686d403
658 | o changeset: 4:32af7686d403
659 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
659 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
660 | | date: Sat Apr 30 15:24:48 2011 +0200
660 | | date: Sat Apr 30 15:24:48 2011 +0200
661 | | summary: D
661 | | summary: D
662 | |
662 | |
663 | o changeset: 3:5fddd98957c8
663 | o changeset: 3:5fddd98957c8
664 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
664 | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
665 | | date: Sat Apr 30 15:24:48 2011 +0200
665 | | date: Sat Apr 30 15:24:48 2011 +0200
666 | | summary: C
666 | | summary: C
667 | |
667 | |
668 | o changeset: 2:42ccdea3bb16
668 | o changeset: 2:42ccdea3bb16
669 |/ user: Nicolas Dumazet <nicdumz.commits@gmail.com>
669 |/ user: Nicolas Dumazet <nicdumz.commits@gmail.com>
670 | date: Sat Apr 30 15:24:48 2011 +0200
670 | date: Sat Apr 30 15:24:48 2011 +0200
671 | summary: B
671 | summary: B
672 |
672 |
673 o changeset: 1:cd010b8cd998
673 o changeset: 1:cd010b8cd998
674 parent: -1:000000000000
674 parent: -1:000000000000
675 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
675 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
676 date: Sat Apr 30 15:24:48 2011 +0200
676 date: Sat Apr 30 15:24:48 2011 +0200
677 summary: A
677 summary: A
678
678
679 @ changeset: 0:3903775176ed
679 @ changeset: 0:3903775176ed
680 user: test
680 user: test
681 date: Thu Jan 01 00:00:00 1970 +0000
681 date: Thu Jan 01 00:00:00 1970 +0000
682 summary: a
682 summary: a
683
683
684
684
685 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
685 $ hg bundle2 --debug --rev '8+7+5+4' ../rev.hg2
686 4 changesets found
686 4 changesets found
687 list of changesets:
687 list of changesets:
688 32af7686d403cf45b5d95f2d70cebea587ac806a
688 32af7686d403cf45b5d95f2d70cebea587ac806a
689 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
689 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
690 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
690 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
691 02de42196ebee42ef284b6780a87cdc96e8eaab6
691 02de42196ebee42ef284b6780a87cdc96e8eaab6
692 start emission of HG2X stream
692 start emission of HG2X stream
693 bundle parameter:
693 bundle parameter:
694 start of parts
694 start of parts
695 bundle part: "b2x:changegroup"
695 bundle part: "b2x:changegroup"
696 bundling: 1/4 changesets (25.00%)
696 bundling: 1/4 changesets (25.00%)
697 bundling: 2/4 changesets (50.00%)
697 bundling: 2/4 changesets (50.00%)
698 bundling: 3/4 changesets (75.00%)
698 bundling: 3/4 changesets (75.00%)
699 bundling: 4/4 changesets (100.00%)
699 bundling: 4/4 changesets (100.00%)
700 bundling: 1/4 manifests (25.00%)
700 bundling: 1/4 manifests (25.00%)
701 bundling: 2/4 manifests (50.00%)
701 bundling: 2/4 manifests (50.00%)
702 bundling: 3/4 manifests (75.00%)
702 bundling: 3/4 manifests (75.00%)
703 bundling: 4/4 manifests (100.00%)
703 bundling: 4/4 manifests (100.00%)
704 bundling: D 1/3 files (33.33%)
704 bundling: D 1/3 files (33.33%)
705 bundling: E 2/3 files (66.67%)
705 bundling: E 2/3 files (66.67%)
706 bundling: H 3/3 files (100.00%)
706 bundling: H 3/3 files (100.00%)
707 end of bundle
707 end of bundle
708
708
709 $ cat ../rev.hg2
709 $ cat ../rev.hg2
710 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
710 HG2X\x00\x00\x00\x16\x0fb2x:changegroup\x00\x00\x00\x00\x00\x00\x00\x00\x06\x13\x00\x00\x00\xa42\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j_\xdd\xd9\x89W\xc8\xa5JMCm\xfe\x1d\xa9\xd8\x7f!\xa1\xb9{\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c (esc)
711 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
711 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02D (esc)
712 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
712 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01D\x00\x00\x00\xa4\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xcd\x01\x0b\x8c\xd9\x98\xf3\x98\x1aZ\x81\x15\xf9O\x8d\xa4\xabP`\x89\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)4dece9c826f69490507b98c6383a3009b295837d (esc)
713 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
713 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x02E (esc)
714 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
714 \x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01E\x00\x00\x00\xa2\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)365b93d57fdf4814e2b5911d6bacff2b12014441 (esc)
715 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
715 \x00\x00\x00f\x00\x00\x00h\x00\x00\x00\x00\x00\x00\x00i\x00\x00\x00j\x00\x00\x00\x01G\x00\x00\x00\xa4\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
716 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
716 \x87\xcd\xc9n\x8e\xaa\xb6$\xb68|\x8c\x8c\xae7\x17\x88\x80\xf3\xfa\x95\xde\xd3\xcb\x1c\xf7\x85\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
717 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
717 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00)8bee48edc7318541fc0013ee41b089276a8c24bf (esc)
718 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
718 \x00\x00\x00f\x00\x00\x00f\x00\x00\x00\x02H (esc)
719 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
719 \x00\x00\x00g\x00\x00\x00h\x00\x00\x00\x01H\x00\x00\x00\x00\x00\x00\x00\x8bn\x1fLG\xec\xb53\xff\xd0\xc8\xe5,\xdc\x88\xaf\xb6\xcd9\xe2\x0cf\xa5\xa0\x18\x17\xfd\xf5#\x9c'8\x02\xb5\xb7a\x8d\x05\x1c\x89\xe4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+D\x00c3f1ca2924c16a19b0656a84900e504e5b0aec2d (esc)
720 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
720 \x00\x00\x00\x8bM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\x00}\x8c\x9d\x88\x84\x13%\xf5\xc6\xb0cq\xb3[N\x8a+\x1a\x83\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00+\x00\x00\x00\xac\x00\x00\x00+E\x009c6fd0350a6c0d0c49d4a9c5017cf07043f54e58 (esc)
721 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
721 \x00\x00\x00\x8b6[\x93\xd5\x7f\xdfH\x14\xe2\xb5\x91\x1dk\xac\xff+\x12\x01DA(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xceM\xec\xe9\xc8&\xf6\x94\x90P{\x98\xc68:0 \xb2\x95\x83}\xee\xa17Fy\x9a\x9e\x0b\xfd\x88\xf2\x9d<.\x9d\xc98\x9fRO\x00\x00\x00V\x00\x00\x00V\x00\x00\x00+F\x0022bfcfd62a21a3287edbd4d656218d0f525ed76a (esc)
722 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
722 \x00\x00\x00\x97\x8b\xeeH\xed\xc71\x85A\xfc\x00\x13\xeeA\xb0\x89'j\x8c$\xbf(\xa5\x84\xc6^\xf1!\xf8\x9e\xb6j\xb7\xd0\xbc\x15=\x80\x99\xe7\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
723 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
723 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00+\x00\x00\x00V\x00\x00\x00\x00\x00\x00\x00\x81\x00\x00\x00\x81\x00\x00\x00+H\x008500189e74a9e0475e822093bc7db0d631aeb0b4 (esc)
724 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
724 \x00\x00\x00\x00\x00\x00\x00\x05D\x00\x00\x00b\xc3\xf1\xca)$\xc1j\x19\xb0ej\x84\x90\x0ePN[ (esc)
725 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
725 \xec-\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x002\xafv\x86\xd4\x03\xcfE\xb5\xd9_-p\xce\xbe\xa5\x87\xac\x80j\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02D (esc)
726 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
726 \x00\x00\x00\x00\x00\x00\x00\x05E\x00\x00\x00b\x9co\xd05 (esc)
727 l\r (no-eol) (esc)
727 l\r (no-eol) (esc)
728 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
728 \x0cI\xd4\xa9\xc5\x01|\xf0pC\xf5NX\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x95 \xee\xa7\x81\xbc\xca\x16\xc1\xe1Z\xcc\x0b\xa1C5\xa0\xe8\xe5\xba\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02E (esc)
729 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
729 \x00\x00\x00\x00\x00\x00\x00\x05H\x00\x00\x00b\x85\x00\x18\x9et\xa9\xe0G^\x82 \x93\xbc}\xb0\xd61\xae\xb0\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xdeB\x19n\xbe\xe4.\xf2\x84\xb6x (esc)
730 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
730 \x87\xcd\xc9n\x8e\xaa\xb6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02H (esc)
731 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
731 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 (no-eol) (esc)
732
732
733 $ hg unbundle2 < ../rev.hg2
733 $ hg unbundle2 < ../rev.hg2
734 adding changesets
734 adding changesets
735 adding manifests
735 adding manifests
736 adding file changes
736 adding file changes
737 added 0 changesets with 0 changes to 3 files
737 added 0 changesets with 0 changes to 3 files
738 0 unread bytes
738 0 unread bytes
739 addchangegroup return: 1
739 addchangegroup return: 1
740
740
741 with reply
741 with reply
742
742
743 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
743 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
744 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
744 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
745 0 unread bytes
745 0 unread bytes
746 addchangegroup return: 1
746 addchangegroup return: 1
747
747
748 $ cat ../rev-reply.hg2
748 $ cat ../rev-reply.hg2
749 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
749 HG2X\x00\x00\x003\x15b2x:reply:changegroup\x00\x00\x00\x00\x00\x02\x0b\x01\x06\x01in-reply-to1return1\x00\x00\x00\x00\x00\x1f (esc)
750 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
750 b2x:output\x00\x00\x00\x01\x00\x01\x0b\x01in-reply-to1\x00\x00\x00dadding changesets (esc)
751 adding manifests
751 adding manifests
752 adding file changes
752 adding file changes
753 added 0 changesets with 0 changes to 3 files
753 added 0 changesets with 0 changes to 3 files
754 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
754 \x00\x00\x00\x00\x00\x00 (no-eol) (esc)
755
755
756 Real world exchange
756 Real world exchange
757 =====================
757 =====================
758
758
759
759
760 clone --pull
760 clone --pull
761
761
762 $ cd ..
762 $ cd ..
763 $ hg clone main other --pull --rev 9520eea781bc
763 $ hg clone main other --pull --rev 9520eea781bc
764 adding changesets
764 adding changesets
765 adding manifests
765 adding manifests
766 adding file changes
766 adding file changes
767 added 2 changesets with 2 changes to 2 files
767 added 2 changesets with 2 changes to 2 files
768 updating to branch default
768 updating to branch default
769 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
769 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
770 $ hg -R other log -G
770 $ hg -R other log -G
771 @ changeset: 1:9520eea781bc
771 @ changeset: 1:9520eea781bc
772 | tag: tip
772 | tag: tip
773 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
773 | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
774 | date: Sat Apr 30 15:24:48 2011 +0200
774 | date: Sat Apr 30 15:24:48 2011 +0200
775 | summary: E
775 | summary: E
776 |
776 |
777 o changeset: 0:cd010b8cd998
777 o changeset: 0:cd010b8cd998
778 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
778 user: Nicolas Dumazet <nicdumz.commits@gmail.com>
779 date: Sat Apr 30 15:24:48 2011 +0200
779 date: Sat Apr 30 15:24:48 2011 +0200
780 summary: A
780 summary: A
781
781
782
782
783 pull
783 pull
784
784
785 $ hg -R other pull -r 24b6387c8c8c
785 $ hg -R other pull -r 24b6387c8c8c
786 pulling from $TESTTMP/main (glob)
786 pulling from $TESTTMP/main (glob)
787 searching for changes
787 searching for changes
788 adding changesets
788 adding changesets
789 adding manifests
789 adding manifests
790 adding file changes
790 adding file changes
791 added 1 changesets with 1 changes to 1 files (+1 heads)
791 added 1 changesets with 1 changes to 1 files (+1 heads)
792 (run 'hg heads' to see heads, 'hg merge' to merge)
792 (run 'hg heads' to see heads, 'hg merge' to merge)
793
793
794 push
794 push
795
795
796 $ hg -R main push other --rev eea13746799a
796 $ hg -R main push other --rev eea13746799a
797 pushing to other
797 pushing to other
798 searching for changes
798 searching for changes
799 remote: adding changesets
799 remote: adding changesets
800 remote: adding manifests
800 remote: adding manifests
801 remote: adding file changes
801 remote: adding file changes
802 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
802 remote: added 1 changesets with 0 changes to 0 files (-1 heads)
803
803
804 pull over ssh
804 pull over ssh
805
805
806 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --traceback
806 $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --traceback
807 pulling from ssh://user@dummy/main
807 pulling from ssh://user@dummy/main
808 searching for changes
808 searching for changes
809 adding changesets
809 adding changesets
810 adding manifests
810 adding manifests
811 adding file changes
811 adding file changes
812 added 1 changesets with 1 changes to 1 files (+1 heads)
812 added 1 changesets with 1 changes to 1 files (+1 heads)
813 (run 'hg heads' to see heads, 'hg merge' to merge)
813 (run 'hg heads' to see heads, 'hg merge' to merge)
814
814
815 pull over http
815 pull over http
816
816
817 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
817 $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log
818 $ cat main.pid >> $DAEMON_PIDS
818 $ cat main.pid >> $DAEMON_PIDS
819
819
820 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16
820 $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16
821 pulling from http://localhost:$HGPORT/
821 pulling from http://localhost:$HGPORT/
822 searching for changes
822 searching for changes
823 adding changesets
823 adding changesets
824 adding manifests
824 adding manifests
825 adding file changes
825 adding file changes
826 added 1 changesets with 1 changes to 1 files (+1 heads)
826 added 1 changesets with 1 changes to 1 files (+1 heads)
827 (run 'hg heads .' to see heads, 'hg merge' to merge)
827 (run 'hg heads .' to see heads, 'hg merge' to merge)
828 $ cat main-error.log
828 $ cat main-error.log
829
829
830 push over ssh
830 push over ssh
831
831
832 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8
832 $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8
833 pushing to ssh://user@dummy/other
833 pushing to ssh://user@dummy/other
834 searching for changes
834 searching for changes
835 remote: adding changesets
835 remote: adding changesets
836 remote: adding manifests
836 remote: adding manifests
837 remote: adding file changes
837 remote: adding file changes
838 remote: added 1 changesets with 1 changes to 1 files
838 remote: added 1 changesets with 1 changes to 1 files
839
839
840 push over http
840 push over http
841
841
842 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
842 $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
843 $ cat other.pid >> $DAEMON_PIDS
843 $ cat other.pid >> $DAEMON_PIDS
844
844
845 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403
845 $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403
846 pushing to http://localhost:$HGPORT2/
846 pushing to http://localhost:$HGPORT2/
847 searching for changes
847 searching for changes
848 remote: adding changesets
848 remote: adding changesets
849 remote: adding manifests
849 remote: adding manifests
850 remote: adding file changes
850 remote: adding file changes
851 remote: added 1 changesets with 1 changes to 1 files
851 remote: added 1 changesets with 1 changes to 1 files
852 $ cat other-error.log
852 $ cat other-error.log
853
853
854 Check final content.
854 Check final content.
855
855
  $ hg -R other log -G
  o changeset: 7:32af7686d403
  | tag: tip
  | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
  | date: Sat Apr 30 15:24:48 2011 +0200
  | summary: D
  |
  o changeset: 6:5fddd98957c8
  | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
  | date: Sat Apr 30 15:24:48 2011 +0200
  | summary: C
  |
  o changeset: 5:42ccdea3bb16
  | parent: 0:cd010b8cd998
  | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
  | date: Sat Apr 30 15:24:48 2011 +0200
  | summary: B
  |
  | o changeset: 4:02de42196ebe
  | | parent: 2:24b6387c8c8c
  | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
  | | date: Sat Apr 30 15:24:48 2011 +0200
  | | summary: H
  | |
  | | o changeset: 3:eea13746799a
  | |/| parent: 2:24b6387c8c8c
  | | | parent: 1:9520eea781bc
  | | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
  | | | date: Sat Apr 30 15:24:48 2011 +0200
  | | | summary: G
  | | |
  | o | changeset: 2:24b6387c8c8c
  |/ / parent: 0:cd010b8cd998
  | | user: Nicolas Dumazet <nicdumz.commits@gmail.com>
  | | date: Sat Apr 30 15:24:48 2011 +0200
  | | summary: F
  | |
  | @ changeset: 1:9520eea781bc
  |/ user: Nicolas Dumazet <nicdumz.commits@gmail.com>
  | date: Sat Apr 30 15:24:48 2011 +0200
  | summary: E
  |
  o changeset: 0:cd010b8cd998
  user: Nicolas Dumazet <nicdumz.commits@gmail.com>
  date: Sat Apr 30 15:24:48 2011 +0200
  summary: A


Error Handling
==============

Check that errors are properly returned to the client during push.

Setting up

  $ cat > failpush.py << EOF
  > """A small extension that makes push fail when using bundle2
  >
  > used to test error handling in bundle2
  > """
  >
  > from mercurial import util
  > from mercurial import bundle2
  > from mercurial import exchange
  > from mercurial import extensions
  >
  > def _pushbundle2failpart(orig, pushop, bundler):
  >     extradata = orig(pushop, bundler)
  >     reason = pushop.ui.config('failpush', 'reason', None)
  >     part = None
  >     if reason == 'abort':
  >         part = bundle2.bundlepart('test:abort')
  >     if reason == 'unknown':
  >         part = bundle2.bundlepart('TEST:UNKNOWN')
  >     if reason == 'race':
  >         # 20 Bytes of crap
  >         part = bundle2.bundlepart('b2x:check:heads', data='01234567890123456789')
  >     if part is not None:
  >         bundler.addpart(part)
  >     return extradata
  >
  > @bundle2.parthandler("test:abort")
  > def handleabort(op, part):
  >     raise util.Abort('Abandon ship!', hint="don't panic")
  >
  > def uisetup(ui):
  >     extensions.wrapfunction(exchange, '_pushbundle2extraparts', _pushbundle2failpart)
  >
  > EOF
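
The wrapping mechanism used above is `extensions.wrapfunction`: it swaps a named attribute on a module for a wrapper that receives the original function as its first argument, so the extension can run the stock `_pushbundle2extraparts` and then add one failing part to the outgoing bundle, depending on the `failpush.reason` setting. A minimal sketch of that wrapping pattern (illustration only; `wrapper` is a placeholder name, the `wrapfunction` call itself is the real API):

    from mercurial import extensions, exchange

    def wrapper(orig, *args, **kwargs):
        # call the original implementation first...
        result = orig(*args, **kwargs)
        # ...then bolt on whatever extra behaviour is needed
        return result

    def uisetup(ui):
        # every later call to exchange._pushbundle2extraparts now goes
        # through wrapper(), with the original passed in as 'orig'
        extensions.wrapfunction(exchange, '_pushbundle2extraparts', wrapper)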

  $ cd main
  $ hg up tip
  3 files updated, 0 files merged, 1 files removed, 0 files unresolved
  $ echo 'I' > I
  $ hg add I
  $ hg ci -m 'I'
  $ hg id
  e7ec4e813ba6 tip
  $ cd ..

  $ cat << EOF >> $HGRCPATH
  > [extensions]
  > failpush=$TESTTMP/failpush.py
  > EOF

  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
  $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
  $ cat other.pid >> $DAEMON_PIDS

Doing the actual push: Abort error

  $ cat << EOF >> $HGRCPATH
  > [failpush]
  > reason = abort
  > EOF

  $ hg -R main push other -r e7ec4e813ba6
  pushing to other
  searching for changes
  abort: Abandon ship!
  (don't panic)
  [255]

  $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
  pushing to ssh://user@dummy/other
  searching for changes
  abort: Abandon ship!
  (don't panic)
  [255]

  $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
  pushing to http://localhost:$HGPORT2/
  searching for changes
  abort: Abandon ship!
  (don't panic)
  [255]


Doing the actual push: unknown mandatory parts

  $ cat << EOF >> $HGRCPATH
  > [failpush]
  > reason = unknown
  > EOF

  $ hg -R main push other -r e7ec4e813ba6
  pushing to other
  searching for changes
  abort: missing support for 'test:unknown'
  [255]

  $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
  pushing to ssh://user@dummy/other
  searching for changes
  abort: missing support for "'test:unknown'"
  [255]

  $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
  pushing to http://localhost:$HGPORT2/
  searching for changes
  abort: missing support for "'test:unknown'"
  [255]

Doing the actual push: race

  $ cat << EOF >> $HGRCPATH
  > [failpush]
  > reason = race
  > EOF

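With `reason = race`, the extension adds a `b2x:check:heads` part whose payload is 20 bytes of junk rather than the real 20-byte head nodes, so the receiving side's heads check can never succeed and each push below is reported as a race. A rough sketch of what such a heads-check handler does on the receiving side (a simplified illustration, not the verbatim bundle2 implementation):

    from mercurial import bundle2, error

    @bundle2.parthandler('b2x:check:heads')
    def checkheads(op, part):
        # the payload is a sequence of 20-byte binary node ids: the heads
        # the pushing client saw on the remote repository
        expected = []
        h = part.read(20)
        while len(h) == 20:
            expected.append(h)
            h = part.read(20)
        # if the heads changed in the meantime (or, as here, the payload is
        # garbage), abort the push as a race instead of applying the bundle
        if expected != op.repo.heads():
            raise error.PushRaced('repository changed while pushing - '
                                  'please try again')

Because a lone 20-byte junk string never matches an actual head, the comparison fails and the client sees the "repository changed while pushing" abort over all three transports.
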
  $ hg -R main push other -r e7ec4e813ba6
  pushing to other
  searching for changes
  abort: push failed:
  'repository changed while pushing - please try again'
  [255]

  $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
  pushing to ssh://user@dummy/other
  searching for changes
  abort: push failed:
  'repository changed while pushing - please try again'
  [255]

  $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
  pushing to http://localhost:$HGPORT2/
  searching for changes
  abort: push failed:
  'repository changed while pushing - please try again'
  [255]

Doing the actual push: hook abort

  $ cat << EOF >> $HGRCPATH
  > [failpush]
  > reason =
  > [hooks]
  > b2x-pretransactionclose.failpush = false
  > EOF
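
This clears `failpush.reason`, so the extension stays out of the way, and instead registers a `b2x-pretransactionclose` hook that always fails: `false` exits with status 1, and a failing pre-transaction-close hook must make the receiving repository roll back the transaction rather than commit the pushed data. For comparison, a minimal in-process hook with the same effect could be written in Python (the function name and path below are placeholders, not part of this test):

    def rejectpush(ui, repo, **kwargs):
        ui.warn('push rejected by hook\n')
        return 1          # a true/non-zero return value marks the hook as failed

    # wired up with something like:
    #   [hooks]
    #   b2x-pretransactionclose.reject = python:/path/to/hooks.py:rejectpush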

  $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS
  $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log
  $ cat other.pid >> $DAEMON_PIDS

  $ hg -R main push other -r e7ec4e813ba6
  pushing to other
  searching for changes
  transaction abort!
  rollback completed
  abort: b2x-pretransactionclose.failpush hook exited with status 1
  [255]

  $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6
  pushing to ssh://user@dummy/other
  searching for changes
  abort: b2x-pretransactionclose.failpush hook exited with status 1
  remote: transaction abort!
  remote: rollback completed
  [255]

  $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6
  pushing to http://localhost:$HGPORT2/
  searching for changes
  abort: b2x-pretransactionclose.failpush hook exited with status 1
  [255]