# Reconstructed from a diff-viewer export of Mercurial changeset
# r32287:df3cf942 (branch 'default'), "changegroup: add bundlecaps back",
# by Durham Goode. The viewer banner and duplicated diff columns have been
# removed so the file is valid Python again.
# changegroup.py - Mercurial changegroup manipulation functions
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import os
import struct
import tempfile
import weakref

from .i18n import _
from .node import (
    hex,
    nullrev,
    short,
)

from . import (
    dagutil,
    discovery,
    error,
    mdiff,
    phases,
    pycompat,
    util,
)
31
31
32 _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
32 _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s"
33 _CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
33 _CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s"
34 _CHANGEGROUPV3_DELTA_HEADER = ">20s20s20s20s20sH"
34 _CHANGEGROUPV3_DELTA_HEADER = ">20s20s20s20s20sH"
35
35
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    s = stream.read(n)
    if len(s) < n:
        # A short read means the peer hung up or the bundle is truncated;
        # surface that as a hard abort rather than returning partial data.
        raise error.Abort(_("stream ended unexpectedly"
                            " (got %d bytes, expected %d)")
                          % (len(s), n))
    return s
44
44
def getchunk(stream):
    """return the next chunk from stream as a string

    A chunk is a 4-byte big-endian length (which includes the 4 length
    bytes themselves) followed by the payload.  A length of 0 is the
    end-of-group marker and yields an empty string.
    """
    d = readexactly(stream, 4)
    l = struct.unpack(">l", d)[0]
    if l <= 4:
        # 0 is a legitimate terminator; 1-4 (or negative) can never frame
        # a valid chunk because the length includes its own 4 bytes.
        if l:
            raise error.Abort(_("invalid chunk length %d") % l)
        return ""
    return readexactly(stream, l - 4)
54
54
def chunkheader(length):
    """return a changegroup chunk header (string)

    The on-the-wire length includes the 4 header bytes themselves.
    """
    return struct.pack(">l", length + 4)
58
58
def closechunk():
    """return a changegroup chunk header (string) for a zero-length chunk

    A zero length terminates the current chunk group on the wire.
    """
    return struct.pack(">l", 0)
62
62
def combineresults(results):
    """logic to combine 0 or more addchangegroup results into one

    Each input uses the addchangegroup return convention:
    0 = nothing changed, 1+n = n heads added, -1-n = n heads removed.
    The combined value follows the same convention, summing the head
    deltas across all results.  Any single 0 makes the combined result 0.
    """
    changedheads = 0
    result = 1
    for ret in results:
        # If any changegroup result is 0, return 0
        if ret == 0:
            result = 0
            break
        if ret < -1:
            changedheads += ret + 1
        elif ret > 1:
            changedheads += ret - 1
    if changedheads > 0:
        result = 1 + changedheads
    elif changedheads < 0:
        result = -1 + changedheads
    return result
81
81
def writechunks(ui, chunks, filename, vfs=None):
    """Write chunks to a file and return its filename.

    The stream is assumed to be a bundle file.
    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    """
    fh = None
    cleanup = None
    try:
        if filename:
            if vfs:
                fh = vfs.open(filename, "wb")
            else:
                # Increase default buffer size because default is usually
                # small (4k is common on Linux).
                fh = open(filename, "wb", 131072)
        else:
            fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
            fh = os.fdopen(fd, pycompat.sysstr("wb"))
        # Only remove the file on error once we are responsible for it;
        # cleanup is reset below after all chunks were written successfully.
        cleanup = filename
        for c in chunks:
            fh.write(c)
        cleanup = None
        return filename
    finally:
        if fh is not None:
            fh.close()
        if cleanup is not None:
            if filename and vfs:
                vfs.unlink(cleanup)
            else:
                os.unlink(cleanup)
114 os.unlink(cleanup)
115
115
class cg1unpacker(object):
    """Unpacker for cg1 changegroup streams.

    A changegroup unpacker handles the framing of the revision data in
    the wire format. Most consumers will want to use the apply()
    method to add the changes from the changegroup to a repository.

    If you're forwarding a changegroup unmodified to another consumer,
    use getchunks(), which returns an iterator of changegroup
    chunks. This is mostly useful for cases where you need to know the
    data stream has ended by observing the end of the changegroup.

    deltachunk() is useful only if you're applying delta data. Most
    consumers should prefer apply() instead.

    A few other public methods exist. Those are used only for
    bundlerepo and some debug commands - their use is discouraged.
    """
    deltaheader = _CHANGEGROUPV1_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    version = '01'
    _grouplistcount = 1 # One list of files after the manifests

    def __init__(self, fh, alg, extras=None):
        """Wrap stream ``fh`` compressed with bundle compression ``alg``.

        ``alg`` is a bundle compression type name ('UN', 'BZ', 'GZ', ...)
        or None, which is treated as uncompressed.
        """
        if alg is None:
            alg = 'UN'
        if alg not in util.compengines.supportedbundletypes:
            raise error.Abort(_('unknown stream compression type: %s')
                              % alg)
        if alg == 'BZ':
            # cg1 'BZ' streams have their 'BZ' magic already consumed by
            # the header parser, so use the truncated-BZ engine.
            alg = '_truncatedBZ'

        compengine = util.compengines.forbundletype(alg)
        self._stream = compengine.decompressorreader(fh)
        self._type = alg
        self.extras = extras or {}
        # Optional per-chunk progress callback, set by apply().
        self.callback = None

    # These methods (compressed, read, seek, tell) all appear to only
    # be used by bundlerepo, but it's a little hard to tell.
    def compressed(self):
        return self._type is not None and self._type != 'UN'
    def read(self, l):
        return self._stream.read(l)
    def seek(self, pos):
        return self._stream.seek(pos)
    def tell(self):
        return self._stream.tell()
    def close(self):
        return self._stream.close()

    def _chunklength(self):
        """Read the next chunk header and return the payload length.

        Returns 0 at a group terminator.  Also fires the progress
        callback, if any, once per non-empty chunk.
        """
        d = readexactly(self._stream, 4)
        l = struct.unpack(">l", d)[0]
        if l <= 4:
            if l:
                raise error.Abort(_("invalid chunk length %d") % l)
            return 0
        if self.callback:
            self.callback()
        return l - 4

    def changelogheader(self):
        """v10 does not have a changelog header chunk"""
        return {}

    def manifestheader(self):
        """v10 does not have a manifest header chunk"""
        return {}

    def filelogheader(self):
        """return the header of the filelogs chunk, v10 only has the filename"""
        l = self._chunklength()
        if not l:
            return {}
        fname = readexactly(self._stream, l)
        return {'filename': fname}

    def _deltaheader(self, headertuple, prevnode):
        """Decode an unpacked cg1 delta header.

        cg1 has no explicit delta base: deltas are against the previous
        revision in the stream, or against p1 for the first one.
        """
        node, p1, p2, cs = headertuple
        if prevnode is None:
            deltabase = p1
        else:
            deltabase = prevnode
        flags = 0
        return node, p1, p2, deltabase, cs, flags

    def deltachunk(self, prevnode):
        """Read one delta chunk and return it as a dict, or {} at group end."""
        l = self._chunklength()
        if not l:
            return {}
        headerdata = readexactly(self._stream, self.deltaheadersize)
        header = struct.unpack(self.deltaheader, headerdata)
        delta = readexactly(self._stream, l - self.deltaheadersize)
        node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
        return {'node': node, 'p1': p1, 'p2': p2, 'cs': cs,
                'deltabase': deltabase, 'delta': delta, 'flags': flags}

    def getchunks(self):
        """returns all the chunks contains in the bundle

        Used when you need to forward the binary stream to a file or another
        network API. To do so, it parse the changegroup data, otherwise it will
        block in case of sshrepo because it don't know the end of the stream.
        """
        # an empty chunkgroup is the end of the changegroup
        # a changegroup has at least 2 chunkgroups (changelog and manifest).
        # after that, changegroup versions 1 and 2 have a series of groups
        # with one group per file. changegroup 3 has a series of directory
        # manifests before the files.
        count = 0
        emptycount = 0
        while emptycount < self._grouplistcount:
            empty = True
            count += 1
            while True:
                chunk = getchunk(self)
                if not chunk:
                    if empty and count > 2:
                        emptycount += 1
                    break
                empty = False
                yield chunkheader(len(chunk))
                pos = 0
                # Re-emit large chunks in 1MB pieces to bound memory use
                # on the consumer side.
                while pos < len(chunk):
                    next = pos + 2**20
                    yield chunk[pos:next]
                    pos = next
            yield closechunk()

    def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
        """Consume the manifest group and add it to the repo's manifest log."""
        # We know that we'll never have more manifests than we had
        # changesets.
        self.callback = prog(_('manifests'), numchanges)
        # no need to check for empty manifest group here:
        # if the result of the merge of 1 and 2 is the same in 3 and 4,
        # no new manifest will be created and the manifest group will
        # be empty during the pull
        self.manifestheader()
        repo.manifestlog._revlog.addgroup(self, revmap, trp)
        repo.ui.progress(_('manifests'), None)
        self.callback = None

    def apply(self, repo, srctype, url, emptyok=False,
              targetphase=phases.draft, expectedtotal=None):
        """Add the changegroup returned by source.read() to this repo.
        srctype is a string like 'push', 'pull', or 'unbundle'. url is
        the URL of the repo where this changegroup is coming from.

        Return an integer summarizing the change to this repo:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - fewer heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        repo = repo.unfiltered()
        def csmap(x):
            repo.ui.debug("add changeset %s\n" % short(x))
            return len(cl)

        def revmap(x):
            return cl.rev(x)

        changesets = files = revisions = 0

        try:
            with repo.transaction("\n".join([srctype,
                                             util.hidepassword(url)])) as tr:
                # The transaction could have been created before and already
                # carries source information. In this case we use the top
                # level data. We overwrite the argument because we need to use
                # the top level value (if they exist) in this function.
                srctype = tr.hookargs.setdefault('source', srctype)
                url = tr.hookargs.setdefault('url', url)
                repo.hook('prechangegroup', throw=True, **tr.hookargs)

                # write changelog data to temp files so concurrent readers
                # will not see an inconsistent view
                cl = repo.changelog
                cl.delayupdate(tr)
                oldheads = set(cl.heads())

                trp = weakref.proxy(tr)
                # pull off the changeset group
                repo.ui.status(_("adding changesets\n"))
                clstart = len(cl)
                class prog(object):
                    # Stateful progress callback shared with _chunklength().
                    def __init__(self, step, total):
                        self._step = step
                        self._total = total
                        self._count = 1
                    def __call__(self):
                        repo.ui.progress(self._step, self._count,
                                         unit=_('chunks'), total=self._total)
                        self._count += 1
                self.callback = prog(_('changesets'), expectedtotal)

                efiles = set()
                def onchangelog(cl, node):
                    efiles.update(cl.readfiles(node))

                self.changelogheader()
                srccontent = cl.addgroup(self, csmap, trp,
                                         addrevisioncb=onchangelog)
                efiles = len(efiles)

                if not (srccontent or emptyok):
                    raise error.Abort(_("received changelog group is empty"))
                clend = len(cl)
                changesets = clend - clstart
                repo.ui.progress(_('changesets'), None)
                self.callback = None

                # pull off the manifest group
                repo.ui.status(_("adding manifests\n"))
                self._unpackmanifests(repo, revmap, trp, prog, changesets)

                needfiles = {}
                if repo.ui.configbool('server', 'validate', default=False):
                    cl = repo.changelog
                    ml = repo.manifestlog
                    # validate incoming csets have their manifests
                    for cset in xrange(clstart, clend):
                        mfnode = cl.changelogrevision(cset).manifest
                        mfest = ml[mfnode].readdelta()
                        # store file nodes we must see
                        for f, n in mfest.iteritems():
                            needfiles.setdefault(f, set()).add(n)

                # process the files
                repo.ui.status(_("adding file changes\n"))
                newrevs, newfiles = _addchangegroupfiles(
                    repo, self, revmap, trp, efiles, needfiles)
                revisions += newrevs
                files += newfiles

                dh = 0
                if oldheads:
                    heads = cl.heads()
                    dh = len(heads) - len(oldheads)
                    for h in heads:
                        # A new head that closes a branch does not count
                        # toward the head delta reported to the user.
                        if h not in oldheads and repo[h].closesbranch():
                            dh -= 1
                htext = ""
                if dh:
                    htext = _(" (%+d heads)") % dh

                repo.ui.status(_("added %d changesets"
                                 " with %d changes to %d files%s\n")
                               % (changesets, revisions, files, htext))
                repo.invalidatevolatilesets()

                if changesets > 0:
                    if 'node' not in tr.hookargs:
                        tr.hookargs['node'] = hex(cl.node(clstart))
                        tr.hookargs['node_last'] = hex(cl.node(clend - 1))
                        hookargs = dict(tr.hookargs)
                    else:
                        hookargs = dict(tr.hookargs)
                        hookargs['node'] = hex(cl.node(clstart))
                        hookargs['node_last'] = hex(cl.node(clend - 1))
                    repo.hook('pretxnchangegroup', throw=True, **hookargs)

                added = [cl.node(r) for r in xrange(clstart, clend)]
                publishing = repo.publishing()
                if srctype in ('push', 'serve'):
                    # Old servers can not push the boundary themselves.
                    # New servers won't push the boundary if changeset already
                    # exists locally as secret
                    #
                    # We should not use added here but the list of all change in
                    # the bundle
                    if publishing:
                        phases.advanceboundary(repo, tr, phases.public,
                                               srccontent)
                    else:
                        # Those changesets have been pushed from the
                        # outside, their phases are going to be pushed
                        # alongside. Therefor `targetphase` is
                        # ignored.
                        phases.advanceboundary(repo, tr, phases.draft,
                                               srccontent)
                        phases.retractboundary(repo, tr, phases.draft, added)
                elif srctype != 'strip':
                    # publishing only alter behavior during push
                    #
                    # strip should not touch boundary at all
                    phases.retractboundary(repo, tr, targetphase, added)

                if changesets > 0:

                    def runhooks():
                        # These hooks run when the lock releases, not when the
                        # transaction closes. So it's possible for the changelog
                        # to have changed since we last saw it.
                        if clstart >= len(repo):
                            return

                        repo.hook("changegroup", **hookargs)

                        for n in added:
                            args = hookargs.copy()
                            args['node'] = hex(n)
                            del args['node_last']
                            repo.hook("incoming", **args)

                        newheads = [h for h in repo.heads()
                                    if h not in oldheads]
                        repo.ui.log("incoming",
                                    "%s incoming changes - new heads: %s\n",
                                    len(added),
                                    ', '.join([hex(c[:6]) for c in newheads]))

                    tr.addpostclose('changegroup-runhooks-%020i' % clstart,
                                    lambda tr: repo._afterlock(runhooks))
        finally:
            repo.ui.flush()
        # never return 0 here:
        if dh < 0:
            return dh - 1
        else:
            return dh + 1
438
438
class cg2unpacker(cg1unpacker):
    """Unpacker for cg2 streams.

    cg2 streams add support for generaldelta, so the delta header
    format is slightly different. All other features about the data
    remain the same.
    """
    deltaheader = _CHANGEGROUPV2_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    version = '02'

    def _deltaheader(self, headertuple, prevnode):
        # cg2 transmits the delta base explicitly, so prevnode is unused;
        # revlog flags are not part of this version's header.
        node, p1, p2, deltabase, cs = headertuple
        flags = 0
        return node, p1, p2, deltabase, cs, flags
454
454
class cg3unpacker(cg2unpacker):
    """Unpacker for cg3 streams.

    cg3 streams add support for exchanging treemanifests and revlog
    flags. It adds the revlog flags to the delta header and an empty chunk
    separating manifests and files.
    """
    deltaheader = _CHANGEGROUPV3_DELTA_HEADER
    deltaheadersize = struct.calcsize(deltaheader)
    version = '03'
    _grouplistcount = 2 # One list of manifests and one list of files

    def _deltaheader(self, headertuple, prevnode):
        # The flags field comes straight off the wire in cg3.
        node, p1, p2, deltabase, cs, flags = headertuple
        return node, p1, p2, deltabase, cs, flags

    def _unpackmanifests(self, repo, revmap, trp, prog, numchanges):
        """Consume the root manifest group, then any directory manifests."""
        super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog,
                                                  numchanges)
        for chunkdata in iter(self.filelogheader, {}):
            # If we get here, there are directory manifests in the changegroup
            d = chunkdata["filename"]
            repo.ui.debug("adding %s revisions\n" % d)
            dirlog = repo.manifestlog._revlog.dirlog(d)
            if not dirlog.addgroup(self, revmap, trp):
                raise error.Abort(_("received dir revlog group is empty"))
481
481
class headerlessfixup(object):
    """Wrap a stream whose header bytes ``h`` were already consumed.

    read() serves the buffered header first, then falls through to the
    underlying stream, so consumers see one contiguous stream again.
    """
    def __init__(self, fh, h):
        self._h = h
        self._fh = fh
    def read(self, n):
        if self._h:
            d, self._h = self._h[:n], self._h[n:]
            if len(d) < n:
                # Header exhausted mid-request; top up from the stream.
                d += readexactly(self._fh, n - len(d))
            return d
        return readexactly(self._fh, n)
493
493
class cg1packer(object):
    """Packer producing version '01' changegroup streams."""
    deltaheader = _CHANGEGROUPV1_DELTA_HEADER
    version = '01'
497 def __init__(self, repo):
497 def __init__(self, repo, bundlecaps=None):
498 """Given a source repo, construct a bundler.
498 """Given a source repo, construct a bundler.
499
500 bundlecaps is optional and can be used to specify the set of
501 capabilities which can be used to build the bundle. While bundlecaps is
502 unused in core Mercurial, extensions rely on this feature to communicate
503 capabilities to customize the changegroup packer.
499 """
504 """
505 # Set of capabilities we can use to build the bundle.
506 if bundlecaps is None:
507 bundlecaps = set()
508 self._bundlecaps = bundlecaps
500 # experimental config: bundle.reorder
509 # experimental config: bundle.reorder
501 reorder = repo.ui.config('bundle', 'reorder', 'auto')
510 reorder = repo.ui.config('bundle', 'reorder', 'auto')
502 if reorder == 'auto':
511 if reorder == 'auto':
503 reorder = None
512 reorder = None
504 else:
513 else:
505 reorder = util.parsebool(reorder)
514 reorder = util.parsebool(reorder)
506 self._repo = repo
515 self._repo = repo
507 self._reorder = reorder
516 self._reorder = reorder
508 self._progress = repo.ui.progress
517 self._progress = repo.ui.progress
509 if self._repo.ui.verbose and not self._repo.ui.debugflag:
518 if self._repo.ui.verbose and not self._repo.ui.debugflag:
510 self._verbosenote = self._repo.ui.note
519 self._verbosenote = self._repo.ui.note
511 else:
520 else:
512 self._verbosenote = lambda s: None
521 self._verbosenote = lambda s: None
513
522
514 def close(self):
523 def close(self):
515 return closechunk()
524 return closechunk()
516
525
517 def fileheader(self, fname):
526 def fileheader(self, fname):
518 return chunkheader(len(fname)) + fname
527 return chunkheader(len(fname)) + fname
519
528
520 # Extracted both for clarity and for overriding in extensions.
529 # Extracted both for clarity and for overriding in extensions.
521 def _sortgroup(self, revlog, nodelist, lookup):
530 def _sortgroup(self, revlog, nodelist, lookup):
522 """Sort nodes for change group and turn them into revnums."""
531 """Sort nodes for change group and turn them into revnums."""
523 # for generaldelta revlogs, we linearize the revs; this will both be
532 # for generaldelta revlogs, we linearize the revs; this will both be
524 # much quicker and generate a much smaller bundle
533 # much quicker and generate a much smaller bundle
525 if (revlog._generaldelta and self._reorder is None) or self._reorder:
534 if (revlog._generaldelta and self._reorder is None) or self._reorder:
526 dag = dagutil.revlogdag(revlog)
535 dag = dagutil.revlogdag(revlog)
527 return dag.linearize(set(revlog.rev(n) for n in nodelist))
536 return dag.linearize(set(revlog.rev(n) for n in nodelist))
528 else:
537 else:
529 return sorted([revlog.rev(n) for n in nodelist])
538 return sorted([revlog.rev(n) for n in nodelist])
530
539
531 def group(self, nodelist, revlog, lookup, units=None):
540 def group(self, nodelist, revlog, lookup, units=None):
532 """Calculate a delta group, yielding a sequence of changegroup chunks
541 """Calculate a delta group, yielding a sequence of changegroup chunks
533 (strings).
542 (strings).
534
543
535 Given a list of changeset revs, return a set of deltas and
544 Given a list of changeset revs, return a set of deltas and
536 metadata corresponding to nodes. The first delta is
545 metadata corresponding to nodes. The first delta is
537 first parent(nodelist[0]) -> nodelist[0], the receiver is
546 first parent(nodelist[0]) -> nodelist[0], the receiver is
538 guaranteed to have this parent as it has all history before
547 guaranteed to have this parent as it has all history before
539 these changesets. In the case firstparent is nullrev the
548 these changesets. In the case firstparent is nullrev the
540 changegroup starts with a full revision.
549 changegroup starts with a full revision.
541
550
542 If units is not None, progress detail will be generated, units specifies
551 If units is not None, progress detail will be generated, units specifies
543 the type of revlog that is touched (changelog, manifest, etc.).
552 the type of revlog that is touched (changelog, manifest, etc.).
544 """
553 """
545 # if we don't have any revisions touched by these changesets, bail
554 # if we don't have any revisions touched by these changesets, bail
546 if len(nodelist) == 0:
555 if len(nodelist) == 0:
547 yield self.close()
556 yield self.close()
548 return
557 return
549
558
550 revs = self._sortgroup(revlog, nodelist, lookup)
559 revs = self._sortgroup(revlog, nodelist, lookup)
551
560
552 # add the parent of the first rev
561 # add the parent of the first rev
553 p = revlog.parentrevs(revs[0])[0]
562 p = revlog.parentrevs(revs[0])[0]
554 revs.insert(0, p)
563 revs.insert(0, p)
555
564
556 # build deltas
565 # build deltas
557 total = len(revs) - 1
566 total = len(revs) - 1
558 msgbundling = _('bundling')
567 msgbundling = _('bundling')
559 for r in xrange(len(revs) - 1):
568 for r in xrange(len(revs) - 1):
560 if units is not None:
569 if units is not None:
561 self._progress(msgbundling, r + 1, unit=units, total=total)
570 self._progress(msgbundling, r + 1, unit=units, total=total)
562 prev, curr = revs[r], revs[r + 1]
571 prev, curr = revs[r], revs[r + 1]
563 linknode = lookup(revlog.node(curr))
572 linknode = lookup(revlog.node(curr))
564 for c in self.revchunk(revlog, curr, prev, linknode):
573 for c in self.revchunk(revlog, curr, prev, linknode):
565 yield c
574 yield c
566
575
567 if units is not None:
576 if units is not None:
568 self._progress(msgbundling, None)
577 self._progress(msgbundling, None)
569 yield self.close()
578 yield self.close()
570
579
571 # filter any nodes that claim to be part of the known set
580 # filter any nodes that claim to be part of the known set
572 def prune(self, revlog, missing, commonrevs):
581 def prune(self, revlog, missing, commonrevs):
573 rr, rl = revlog.rev, revlog.linkrev
582 rr, rl = revlog.rev, revlog.linkrev
574 return [n for n in missing if rl(rr(n)) not in commonrevs]
583 return [n for n in missing if rl(rr(n)) not in commonrevs]
575
584
576 def _packmanifests(self, dir, mfnodes, lookuplinknode):
585 def _packmanifests(self, dir, mfnodes, lookuplinknode):
577 """Pack flat manifests into a changegroup stream."""
586 """Pack flat manifests into a changegroup stream."""
578 assert not dir
587 assert not dir
579 for chunk in self.group(mfnodes, self._repo.manifestlog._revlog,
588 for chunk in self.group(mfnodes, self._repo.manifestlog._revlog,
580 lookuplinknode, units=_('manifests')):
589 lookuplinknode, units=_('manifests')):
581 yield chunk
590 yield chunk
582
591
583 def _manifestsdone(self):
592 def _manifestsdone(self):
584 return ''
593 return ''
585
594
586 def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
595 def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
587 '''yield a sequence of changegroup chunks (strings)'''
596 '''yield a sequence of changegroup chunks (strings)'''
588 repo = self._repo
597 repo = self._repo
589 cl = repo.changelog
598 cl = repo.changelog
590
599
591 clrevorder = {}
600 clrevorder = {}
592 mfs = {} # needed manifests
601 mfs = {} # needed manifests
593 fnodes = {} # needed file nodes
602 fnodes = {} # needed file nodes
594 changedfiles = set()
603 changedfiles = set()
595
604
596 # Callback for the changelog, used to collect changed files and manifest
605 # Callback for the changelog, used to collect changed files and manifest
597 # nodes.
606 # nodes.
598 # Returns the linkrev node (identity in the changelog case).
607 # Returns the linkrev node (identity in the changelog case).
599 def lookupcl(x):
608 def lookupcl(x):
600 c = cl.read(x)
609 c = cl.read(x)
601 clrevorder[x] = len(clrevorder)
610 clrevorder[x] = len(clrevorder)
602 n = c[0]
611 n = c[0]
603 # record the first changeset introducing this manifest version
612 # record the first changeset introducing this manifest version
604 mfs.setdefault(n, x)
613 mfs.setdefault(n, x)
605 # Record a complete list of potentially-changed files in
614 # Record a complete list of potentially-changed files in
606 # this manifest.
615 # this manifest.
607 changedfiles.update(c[3])
616 changedfiles.update(c[3])
608 return x
617 return x
609
618
610 self._verbosenote(_('uncompressed size of bundle content:\n'))
619 self._verbosenote(_('uncompressed size of bundle content:\n'))
611 size = 0
620 size = 0
612 for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets')):
621 for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets')):
613 size += len(chunk)
622 size += len(chunk)
614 yield chunk
623 yield chunk
615 self._verbosenote(_('%8.i (changelog)\n') % size)
624 self._verbosenote(_('%8.i (changelog)\n') % size)
616
625
617 # We need to make sure that the linkrev in the changegroup refers to
626 # We need to make sure that the linkrev in the changegroup refers to
618 # the first changeset that introduced the manifest or file revision.
627 # the first changeset that introduced the manifest or file revision.
619 # The fastpath is usually safer than the slowpath, because the filelogs
628 # The fastpath is usually safer than the slowpath, because the filelogs
620 # are walked in revlog order.
629 # are walked in revlog order.
621 #
630 #
622 # When taking the slowpath with reorder=None and the manifest revlog
631 # When taking the slowpath with reorder=None and the manifest revlog
623 # uses generaldelta, the manifest may be walked in the "wrong" order.
632 # uses generaldelta, the manifest may be walked in the "wrong" order.
624 # Without 'clrevorder', we would get an incorrect linkrev (see fix in
633 # Without 'clrevorder', we would get an incorrect linkrev (see fix in
625 # cc0ff93d0c0c).
634 # cc0ff93d0c0c).
626 #
635 #
627 # When taking the fastpath, we are only vulnerable to reordering
636 # When taking the fastpath, we are only vulnerable to reordering
628 # of the changelog itself. The changelog never uses generaldelta, so
637 # of the changelog itself. The changelog never uses generaldelta, so
629 # it is only reordered when reorder=True. To handle this case, we
638 # it is only reordered when reorder=True. To handle this case, we
630 # simply take the slowpath, which already has the 'clrevorder' logic.
639 # simply take the slowpath, which already has the 'clrevorder' logic.
631 # This was also fixed in cc0ff93d0c0c.
640 # This was also fixed in cc0ff93d0c0c.
632 fastpathlinkrev = fastpathlinkrev and not self._reorder
641 fastpathlinkrev = fastpathlinkrev and not self._reorder
633 # Treemanifests don't work correctly with fastpathlinkrev
642 # Treemanifests don't work correctly with fastpathlinkrev
634 # either, because we don't discover which directory nodes to
643 # either, because we don't discover which directory nodes to
635 # send along with files. This could probably be fixed.
644 # send along with files. This could probably be fixed.
636 fastpathlinkrev = fastpathlinkrev and (
645 fastpathlinkrev = fastpathlinkrev and (
637 'treemanifest' not in repo.requirements)
646 'treemanifest' not in repo.requirements)
638
647
639 for chunk in self.generatemanifests(commonrevs, clrevorder,
648 for chunk in self.generatemanifests(commonrevs, clrevorder,
640 fastpathlinkrev, mfs, fnodes):
649 fastpathlinkrev, mfs, fnodes):
641 yield chunk
650 yield chunk
642 mfs.clear()
651 mfs.clear()
643 clrevs = set(cl.rev(x) for x in clnodes)
652 clrevs = set(cl.rev(x) for x in clnodes)
644
653
645 if not fastpathlinkrev:
654 if not fastpathlinkrev:
646 def linknodes(unused, fname):
655 def linknodes(unused, fname):
647 return fnodes.get(fname, {})
656 return fnodes.get(fname, {})
648 else:
657 else:
649 cln = cl.node
658 cln = cl.node
650 def linknodes(filerevlog, fname):
659 def linknodes(filerevlog, fname):
651 llr = filerevlog.linkrev
660 llr = filerevlog.linkrev
652 fln = filerevlog.node
661 fln = filerevlog.node
653 revs = ((r, llr(r)) for r in filerevlog)
662 revs = ((r, llr(r)) for r in filerevlog)
654 return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs)
663 return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs)
655
664
656 for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
665 for chunk in self.generatefiles(changedfiles, linknodes, commonrevs,
657 source):
666 source):
658 yield chunk
667 yield chunk
659
668
660 yield self.close()
669 yield self.close()
661
670
662 if clnodes:
671 if clnodes:
663 repo.hook('outgoing', node=hex(clnodes[0]), source=source)
672 repo.hook('outgoing', node=hex(clnodes[0]), source=source)
664
673
665 def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev, mfs,
674 def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev, mfs,
666 fnodes):
675 fnodes):
667 repo = self._repo
676 repo = self._repo
668 mfl = repo.manifestlog
677 mfl = repo.manifestlog
669 dirlog = mfl._revlog.dirlog
678 dirlog = mfl._revlog.dirlog
670 tmfnodes = {'': mfs}
679 tmfnodes = {'': mfs}
671
680
672 # Callback for the manifest, used to collect linkrevs for filelog
681 # Callback for the manifest, used to collect linkrevs for filelog
673 # revisions.
682 # revisions.
674 # Returns the linkrev node (collected in lookupcl).
683 # Returns the linkrev node (collected in lookupcl).
675 def makelookupmflinknode(dir):
684 def makelookupmflinknode(dir):
676 if fastpathlinkrev:
685 if fastpathlinkrev:
677 assert not dir
686 assert not dir
678 return mfs.__getitem__
687 return mfs.__getitem__
679
688
680 def lookupmflinknode(x):
689 def lookupmflinknode(x):
681 """Callback for looking up the linknode for manifests.
690 """Callback for looking up the linknode for manifests.
682
691
683 Returns the linkrev node for the specified manifest.
692 Returns the linkrev node for the specified manifest.
684
693
685 SIDE EFFECT:
694 SIDE EFFECT:
686
695
687 1) fclnodes gets populated with the list of relevant
696 1) fclnodes gets populated with the list of relevant
688 file nodes if we're not using fastpathlinkrev
697 file nodes if we're not using fastpathlinkrev
689 2) When treemanifests are in use, collects treemanifest nodes
698 2) When treemanifests are in use, collects treemanifest nodes
690 to send
699 to send
691
700
692 Note that this means manifests must be completely sent to
701 Note that this means manifests must be completely sent to
693 the client before you can trust the list of files and
702 the client before you can trust the list of files and
694 treemanifests to send.
703 treemanifests to send.
695 """
704 """
696 clnode = tmfnodes[dir][x]
705 clnode = tmfnodes[dir][x]
697 mdata = mfl.get(dir, x).readfast(shallow=True)
706 mdata = mfl.get(dir, x).readfast(shallow=True)
698 for p, n, fl in mdata.iterentries():
707 for p, n, fl in mdata.iterentries():
699 if fl == 't': # subdirectory manifest
708 if fl == 't': # subdirectory manifest
700 subdir = dir + p + '/'
709 subdir = dir + p + '/'
701 tmfclnodes = tmfnodes.setdefault(subdir, {})
710 tmfclnodes = tmfnodes.setdefault(subdir, {})
702 tmfclnode = tmfclnodes.setdefault(n, clnode)
711 tmfclnode = tmfclnodes.setdefault(n, clnode)
703 if clrevorder[clnode] < clrevorder[tmfclnode]:
712 if clrevorder[clnode] < clrevorder[tmfclnode]:
704 tmfclnodes[n] = clnode
713 tmfclnodes[n] = clnode
705 else:
714 else:
706 f = dir + p
715 f = dir + p
707 fclnodes = fnodes.setdefault(f, {})
716 fclnodes = fnodes.setdefault(f, {})
708 fclnode = fclnodes.setdefault(n, clnode)
717 fclnode = fclnodes.setdefault(n, clnode)
709 if clrevorder[clnode] < clrevorder[fclnode]:
718 if clrevorder[clnode] < clrevorder[fclnode]:
710 fclnodes[n] = clnode
719 fclnodes[n] = clnode
711 return clnode
720 return clnode
712 return lookupmflinknode
721 return lookupmflinknode
713
722
714 size = 0
723 size = 0
715 while tmfnodes:
724 while tmfnodes:
716 dir = min(tmfnodes)
725 dir = min(tmfnodes)
717 nodes = tmfnodes[dir]
726 nodes = tmfnodes[dir]
718 prunednodes = self.prune(dirlog(dir), nodes, commonrevs)
727 prunednodes = self.prune(dirlog(dir), nodes, commonrevs)
719 if not dir or prunednodes:
728 if not dir or prunednodes:
720 for x in self._packmanifests(dir, prunednodes,
729 for x in self._packmanifests(dir, prunednodes,
721 makelookupmflinknode(dir)):
730 makelookupmflinknode(dir)):
722 size += len(x)
731 size += len(x)
723 yield x
732 yield x
724 del tmfnodes[dir]
733 del tmfnodes[dir]
725 self._verbosenote(_('%8.i (manifests)\n') % size)
734 self._verbosenote(_('%8.i (manifests)\n') % size)
726 yield self._manifestsdone()
735 yield self._manifestsdone()
727
736
728 # The 'source' parameter is useful for extensions
737 # The 'source' parameter is useful for extensions
729 def generatefiles(self, changedfiles, linknodes, commonrevs, source):
738 def generatefiles(self, changedfiles, linknodes, commonrevs, source):
730 repo = self._repo
739 repo = self._repo
731 progress = self._progress
740 progress = self._progress
732 msgbundling = _('bundling')
741 msgbundling = _('bundling')
733
742
734 total = len(changedfiles)
743 total = len(changedfiles)
735 # for progress output
744 # for progress output
736 msgfiles = _('files')
745 msgfiles = _('files')
737 for i, fname in enumerate(sorted(changedfiles)):
746 for i, fname in enumerate(sorted(changedfiles)):
738 filerevlog = repo.file(fname)
747 filerevlog = repo.file(fname)
739 if not filerevlog:
748 if not filerevlog:
740 raise error.Abort(_("empty or missing revlog for %s") % fname)
749 raise error.Abort(_("empty or missing revlog for %s") % fname)
741
750
742 linkrevnodes = linknodes(filerevlog, fname)
751 linkrevnodes = linknodes(filerevlog, fname)
743 # Lookup for filenodes, we collected the linkrev nodes above in the
752 # Lookup for filenodes, we collected the linkrev nodes above in the
744 # fastpath case and with lookupmf in the slowpath case.
753 # fastpath case and with lookupmf in the slowpath case.
745 def lookupfilelog(x):
754 def lookupfilelog(x):
746 return linkrevnodes[x]
755 return linkrevnodes[x]
747
756
748 filenodes = self.prune(filerevlog, linkrevnodes, commonrevs)
757 filenodes = self.prune(filerevlog, linkrevnodes, commonrevs)
749 if filenodes:
758 if filenodes:
750 progress(msgbundling, i + 1, item=fname, unit=msgfiles,
759 progress(msgbundling, i + 1, item=fname, unit=msgfiles,
751 total=total)
760 total=total)
752 h = self.fileheader(fname)
761 h = self.fileheader(fname)
753 size = len(h)
762 size = len(h)
754 yield h
763 yield h
755 for chunk in self.group(filenodes, filerevlog, lookupfilelog):
764 for chunk in self.group(filenodes, filerevlog, lookupfilelog):
756 size += len(chunk)
765 size += len(chunk)
757 yield chunk
766 yield chunk
758 self._verbosenote(_('%8.i %s\n') % (size, fname))
767 self._verbosenote(_('%8.i %s\n') % (size, fname))
759 progress(msgbundling, None)
768 progress(msgbundling, None)
760
769
761 def deltaparent(self, revlog, rev, p1, p2, prev):
770 def deltaparent(self, revlog, rev, p1, p2, prev):
762 return prev
771 return prev
763
772
764 def revchunk(self, revlog, rev, prev, linknode):
773 def revchunk(self, revlog, rev, prev, linknode):
765 node = revlog.node(rev)
774 node = revlog.node(rev)
766 p1, p2 = revlog.parentrevs(rev)
775 p1, p2 = revlog.parentrevs(rev)
767 base = self.deltaparent(revlog, rev, p1, p2, prev)
776 base = self.deltaparent(revlog, rev, p1, p2, prev)
768
777
769 prefix = ''
778 prefix = ''
770 if revlog.iscensored(base) or revlog.iscensored(rev):
779 if revlog.iscensored(base) or revlog.iscensored(rev):
771 try:
780 try:
772 delta = revlog.revision(node, raw=True)
781 delta = revlog.revision(node, raw=True)
773 except error.CensoredNodeError as e:
782 except error.CensoredNodeError as e:
774 delta = e.tombstone
783 delta = e.tombstone
775 if base == nullrev:
784 if base == nullrev:
776 prefix = mdiff.trivialdiffheader(len(delta))
785 prefix = mdiff.trivialdiffheader(len(delta))
777 else:
786 else:
778 baselen = revlog.rawsize(base)
787 baselen = revlog.rawsize(base)
779 prefix = mdiff.replacediffheader(baselen, len(delta))
788 prefix = mdiff.replacediffheader(baselen, len(delta))
780 elif base == nullrev:
789 elif base == nullrev:
781 delta = revlog.revision(node, raw=True)
790 delta = revlog.revision(node, raw=True)
782 prefix = mdiff.trivialdiffheader(len(delta))
791 prefix = mdiff.trivialdiffheader(len(delta))
783 else:
792 else:
784 delta = revlog.revdiff(base, rev)
793 delta = revlog.revdiff(base, rev)
785 p1n, p2n = revlog.parents(node)
794 p1n, p2n = revlog.parents(node)
786 basenode = revlog.node(base)
795 basenode = revlog.node(base)
787 flags = revlog.flags(rev)
796 flags = revlog.flags(rev)
788 meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode, flags)
797 meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode, flags)
789 meta += prefix
798 meta += prefix
790 l = len(meta) + len(delta)
799 l = len(meta) + len(delta)
791 yield chunkheader(l)
800 yield chunkheader(l)
792 yield meta
801 yield meta
793 yield delta
802 yield delta
794 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
803 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
795 # do nothing with basenode, it is implicitly the previous one in HG10
804 # do nothing with basenode, it is implicitly the previous one in HG10
796 # do nothing with flags, it is implicitly 0 for cg1 and cg2
805 # do nothing with flags, it is implicitly 0 for cg1 and cg2
797 return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
806 return struct.pack(self.deltaheader, node, p1n, p2n, linknode)
798
807
799 class cg2packer(cg1packer):
808 class cg2packer(cg1packer):
800 version = '02'
809 version = '02'
801 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
810 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
802
811
803 def __init__(self, repo):
812 def __init__(self, repo, bundlecaps=None):
804 super(cg2packer, self).__init__(repo)
813 super(cg2packer, self).__init__(repo, bundlecaps)
805 if self._reorder is None:
814 if self._reorder is None:
806 # Since generaldelta is directly supported by cg2, reordering
815 # Since generaldelta is directly supported by cg2, reordering
807 # generally doesn't help, so we disable it by default (treating
816 # generally doesn't help, so we disable it by default (treating
808 # bundle.reorder=auto just like bundle.reorder=False).
817 # bundle.reorder=auto just like bundle.reorder=False).
809 self._reorder = False
818 self._reorder = False
810
819
811 def deltaparent(self, revlog, rev, p1, p2, prev):
820 def deltaparent(self, revlog, rev, p1, p2, prev):
812 dp = revlog.deltaparent(rev)
821 dp = revlog.deltaparent(rev)
813 if dp == nullrev and revlog.storedeltachains:
822 if dp == nullrev and revlog.storedeltachains:
814 # Avoid sending full revisions when delta parent is null. Pick prev
823 # Avoid sending full revisions when delta parent is null. Pick prev
815 # in that case. It's tempting to pick p1 in this case, as p1 will
824 # in that case. It's tempting to pick p1 in this case, as p1 will
816 # be smaller in the common case. However, computing a delta against
825 # be smaller in the common case. However, computing a delta against
817 # p1 may require resolving the raw text of p1, which could be
826 # p1 may require resolving the raw text of p1, which could be
818 # expensive. The revlog caches should have prev cached, meaning
827 # expensive. The revlog caches should have prev cached, meaning
819 # less CPU for changegroup generation. There is likely room to add
828 # less CPU for changegroup generation. There is likely room to add
820 # a flag and/or config option to control this behavior.
829 # a flag and/or config option to control this behavior.
821 return prev
830 return prev
822 elif dp == nullrev:
831 elif dp == nullrev:
823 # revlog is configured to use full snapshot for a reason,
832 # revlog is configured to use full snapshot for a reason,
824 # stick to full snapshot.
833 # stick to full snapshot.
825 return nullrev
834 return nullrev
826 elif dp not in (p1, p2, prev):
835 elif dp not in (p1, p2, prev):
827 # Pick prev when we can't be sure remote has the base revision.
836 # Pick prev when we can't be sure remote has the base revision.
828 return prev
837 return prev
829 else:
838 else:
830 return dp
839 return dp
831
840
832 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
841 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
833 # Do nothing with flags, it is implicitly 0 in cg1 and cg2
842 # Do nothing with flags, it is implicitly 0 in cg1 and cg2
834 return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
843 return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode)
835
844
836 class cg3packer(cg2packer):
845 class cg3packer(cg2packer):
837 version = '03'
846 version = '03'
838 deltaheader = _CHANGEGROUPV3_DELTA_HEADER
847 deltaheader = _CHANGEGROUPV3_DELTA_HEADER
839
848
840 def _packmanifests(self, dir, mfnodes, lookuplinknode):
849 def _packmanifests(self, dir, mfnodes, lookuplinknode):
841 if dir:
850 if dir:
842 yield self.fileheader(dir)
851 yield self.fileheader(dir)
843
852
844 dirlog = self._repo.manifestlog._revlog.dirlog(dir)
853 dirlog = self._repo.manifestlog._revlog.dirlog(dir)
845 for chunk in self.group(mfnodes, dirlog, lookuplinknode,
854 for chunk in self.group(mfnodes, dirlog, lookuplinknode,
846 units=_('manifests')):
855 units=_('manifests')):
847 yield chunk
856 yield chunk
848
857
849 def _manifestsdone(self):
858 def _manifestsdone(self):
850 return self.close()
859 return self.close()
851
860
852 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
861 def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags):
853 return struct.pack(
862 return struct.pack(
854 self.deltaheader, node, p1n, p2n, basenode, linknode, flags)
863 self.deltaheader, node, p1n, p2n, basenode, linknode, flags)
855
864
856 _packermap = {'01': (cg1packer, cg1unpacker),
865 _packermap = {'01': (cg1packer, cg1unpacker),
857 # cg2 adds support for exchanging generaldelta
866 # cg2 adds support for exchanging generaldelta
858 '02': (cg2packer, cg2unpacker),
867 '02': (cg2packer, cg2unpacker),
859 # cg3 adds support for exchanging revlog flags and treemanifests
868 # cg3 adds support for exchanging revlog flags and treemanifests
860 '03': (cg3packer, cg3unpacker),
869 '03': (cg3packer, cg3unpacker),
861 }
870 }
862
871
863 def allsupportedversions(repo):
872 def allsupportedversions(repo):
864 versions = set(_packermap.keys())
873 versions = set(_packermap.keys())
865 if not (repo.ui.configbool('experimental', 'changegroup3') or
874 if not (repo.ui.configbool('experimental', 'changegroup3') or
866 repo.ui.configbool('experimental', 'treemanifest') or
875 repo.ui.configbool('experimental', 'treemanifest') or
867 'treemanifest' in repo.requirements):
876 'treemanifest' in repo.requirements):
868 versions.discard('03')
877 versions.discard('03')
869 return versions
878 return versions
870
879
871 # Changegroup versions that can be applied to the repo
880 # Changegroup versions that can be applied to the repo
872 def supportedincomingversions(repo):
881 def supportedincomingversions(repo):
873 return allsupportedversions(repo)
882 return allsupportedversions(repo)
874
883
875 # Changegroup versions that can be created from the repo
884 # Changegroup versions that can be created from the repo
876 def supportedoutgoingversions(repo):
885 def supportedoutgoingversions(repo):
877 versions = allsupportedversions(repo)
886 versions = allsupportedversions(repo)
878 if 'treemanifest' in repo.requirements:
887 if 'treemanifest' in repo.requirements:
879 # Versions 01 and 02 support only flat manifests and it's just too
888 # Versions 01 and 02 support only flat manifests and it's just too
880 # expensive to convert between the flat manifest and tree manifest on
889 # expensive to convert between the flat manifest and tree manifest on
881 # the fly. Since tree manifests are hashed differently, all of history
890 # the fly. Since tree manifests are hashed differently, all of history
882 # would have to be converted. Instead, we simply don't even pretend to
891 # would have to be converted. Instead, we simply don't even pretend to
883 # support versions 01 and 02.
892 # support versions 01 and 02.
884 versions.discard('01')
893 versions.discard('01')
885 versions.discard('02')
894 versions.discard('02')
886 return versions
895 return versions
887
896
888 def safeversion(repo):
897 def safeversion(repo):
889 # Finds the smallest version that it's safe to assume clients of the repo
898 # Finds the smallest version that it's safe to assume clients of the repo
890 # will support. For example, all hg versions that support generaldelta also
899 # will support. For example, all hg versions that support generaldelta also
891 # support changegroup 02.
900 # support changegroup 02.
892 versions = supportedoutgoingversions(repo)
901 versions = supportedoutgoingversions(repo)
893 if 'generaldelta' in repo.requirements:
902 if 'generaldelta' in repo.requirements:
894 versions.discard('01')
903 versions.discard('01')
895 assert versions
904 assert versions
896 return min(versions)
905 return min(versions)
897
906
898 def getbundler(version, repo):
907 def getbundler(version, repo, bundlecaps=None):
899 assert version in supportedoutgoingversions(repo)
908 assert version in supportedoutgoingversions(repo)
900 return _packermap[version][0](repo)
909 return _packermap[version][0](repo, bundlecaps)
901
910
902 def getunbundler(version, fh, alg, extras=None):
911 def getunbundler(version, fh, alg, extras=None):
903 return _packermap[version][1](fh, alg, extras=extras)
912 return _packermap[version][1](fh, alg, extras=extras)
904
913
905 def _changegroupinfo(repo, nodes, source):
914 def _changegroupinfo(repo, nodes, source):
906 if repo.ui.verbose or source == 'bundle':
915 if repo.ui.verbose or source == 'bundle':
907 repo.ui.status(_("%d changesets found\n") % len(nodes))
916 repo.ui.status(_("%d changesets found\n") % len(nodes))
908 if repo.ui.debugflag:
917 if repo.ui.debugflag:
909 repo.ui.debug("list of changesets:\n")
918 repo.ui.debug("list of changesets:\n")
910 for node in nodes:
919 for node in nodes:
911 repo.ui.debug("%s\n" % hex(node))
920 repo.ui.debug("%s\n" % hex(node))
912
921
913 def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
922 def getsubsetraw(repo, outgoing, bundler, source, fastpath=False):
914 repo = repo.unfiltered()
923 repo = repo.unfiltered()
915 commonrevs = outgoing.common
924 commonrevs = outgoing.common
916 csets = outgoing.missing
925 csets = outgoing.missing
917 heads = outgoing.missingheads
926 heads = outgoing.missingheads
918 # We go through the fast path if we get told to, or if all (unfiltered
927 # We go through the fast path if we get told to, or if all (unfiltered
919 # heads have been requested (since we then know there all linkrevs will
928 # heads have been requested (since we then know there all linkrevs will
920 # be pulled by the client).
929 # be pulled by the client).
921 heads.sort()
930 heads.sort()
922 fastpathlinkrev = fastpath or (
931 fastpathlinkrev = fastpath or (
923 repo.filtername is None and heads == sorted(repo.heads()))
932 repo.filtername is None and heads == sorted(repo.heads()))
924
933
925 repo.hook('preoutgoing', throw=True, source=source)
934 repo.hook('preoutgoing', throw=True, source=source)
926 _changegroupinfo(repo, csets, source)
935 _changegroupinfo(repo, csets, source)
927 return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
936 return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
928
937
929 def getsubset(repo, outgoing, bundler, source, fastpath=False):
938 def getsubset(repo, outgoing, bundler, source, fastpath=False):
930 gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
939 gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath)
931 return getunbundler(bundler.version, util.chunkbuffer(gengroup), None,
940 return getunbundler(bundler.version, util.chunkbuffer(gengroup), None,
932 {'clcount': len(outgoing.missing)})
941 {'clcount': len(outgoing.missing)})
933
942
934 def changegroupsubset(repo, roots, heads, source, version='01'):
943 def changegroupsubset(repo, roots, heads, source, version='01'):
935 """Compute a changegroup consisting of all the nodes that are
944 """Compute a changegroup consisting of all the nodes that are
936 descendants of any of the roots and ancestors of any of the heads.
945 descendants of any of the roots and ancestors of any of the heads.
937 Return a chunkbuffer object whose read() method will return
946 Return a chunkbuffer object whose read() method will return
938 successive changegroup chunks.
947 successive changegroup chunks.
939
948
940 It is fairly complex as determining which filenodes and which
949 It is fairly complex as determining which filenodes and which
941 manifest nodes need to be included for the changeset to be complete
950 manifest nodes need to be included for the changeset to be complete
942 is non-trivial.
951 is non-trivial.
943
952
944 Another wrinkle is doing the reverse, figuring out which changeset in
953 Another wrinkle is doing the reverse, figuring out which changeset in
945 the changegroup a particular filenode or manifestnode belongs to.
954 the changegroup a particular filenode or manifestnode belongs to.
946 """
955 """
947 outgoing = discovery.outgoing(repo, missingroots=roots, missingheads=heads)
956 outgoing = discovery.outgoing(repo, missingroots=roots, missingheads=heads)
948 bundler = getbundler(version, repo)
957 bundler = getbundler(version, repo)
949 return getsubset(repo, outgoing, bundler, source)
958 return getsubset(repo, outgoing, bundler, source)
950
959
951 def getlocalchangegroupraw(repo, source, outgoing, version='01'):
960 def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None,
961 version='01'):
952 """Like getbundle, but taking a discovery.outgoing as an argument.
962 """Like getbundle, but taking a discovery.outgoing as an argument.
953
963
954 This is only implemented for local repos and reuses potentially
964 This is only implemented for local repos and reuses potentially
955 precomputed sets in outgoing. Returns a raw changegroup generator."""
965 precomputed sets in outgoing. Returns a raw changegroup generator."""
956 if not outgoing.missing:
966 if not outgoing.missing:
957 return None
967 return None
958 bundler = getbundler(version, repo)
968 bundler = getbundler(version, repo, bundlecaps)
959 return getsubsetraw(repo, outgoing, bundler, source)
969 return getsubsetraw(repo, outgoing, bundler, source)
960
970
961 def getchangegroup(repo, source, outgoing, version='01'):
971 def getchangegroup(repo, source, outgoing, bundlecaps=None,
972 version='01'):
962 """Like getbundle, but taking a discovery.outgoing as an argument.
973 """Like getbundle, but taking a discovery.outgoing as an argument.
963
974
964 This is only implemented for local repos and reuses potentially
975 This is only implemented for local repos and reuses potentially
965 precomputed sets in outgoing."""
976 precomputed sets in outgoing."""
966 if not outgoing.missing:
977 if not outgoing.missing:
967 return None
978 return None
968 bundler = getbundler(version, repo)
979 bundler = getbundler(version, repo, bundlecaps)
969 return getsubset(repo, outgoing, bundler, source)
980 return getsubset(repo, outgoing, bundler, source)
970
981
971 def getlocalchangegroup(repo, *args, **kwargs):
982 def getlocalchangegroup(repo, *args, **kwargs):
972 repo.ui.deprecwarn('getlocalchangegroup is deprecated, use getchangegroup',
983 repo.ui.deprecwarn('getlocalchangegroup is deprecated, use getchangegroup',
973 '4.3')
984 '4.3')
974 return getchangegroup(repo, *args, **kwargs)
985 return getchangegroup(repo, *args, **kwargs)
975
986
976 def changegroup(repo, basenodes, source):
987 def changegroup(repo, basenodes, source):
977 # to avoid a race we use changegroupsubset() (issue1320)
988 # to avoid a race we use changegroupsubset() (issue1320)
978 return changegroupsubset(repo, basenodes, repo.heads(), source)
989 return changegroupsubset(repo, basenodes, repo.heads(), source)
979
990
980 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
991 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
981 revisions = 0
992 revisions = 0
982 files = 0
993 files = 0
983 for chunkdata in iter(source.filelogheader, {}):
994 for chunkdata in iter(source.filelogheader, {}):
984 files += 1
995 files += 1
985 f = chunkdata["filename"]
996 f = chunkdata["filename"]
986 repo.ui.debug("adding %s revisions\n" % f)
997 repo.ui.debug("adding %s revisions\n" % f)
987 repo.ui.progress(_('files'), files, unit=_('files'),
998 repo.ui.progress(_('files'), files, unit=_('files'),
988 total=expectedfiles)
999 total=expectedfiles)
989 fl = repo.file(f)
1000 fl = repo.file(f)
990 o = len(fl)
1001 o = len(fl)
991 try:
1002 try:
992 if not fl.addgroup(source, revmap, trp):
1003 if not fl.addgroup(source, revmap, trp):
993 raise error.Abort(_("received file revlog group is empty"))
1004 raise error.Abort(_("received file revlog group is empty"))
994 except error.CensoredBaseError as e:
1005 except error.CensoredBaseError as e:
995 raise error.Abort(_("received delta base is censored: %s") % e)
1006 raise error.Abort(_("received delta base is censored: %s") % e)
996 revisions += len(fl) - o
1007 revisions += len(fl) - o
997 if f in needfiles:
1008 if f in needfiles:
998 needs = needfiles[f]
1009 needs = needfiles[f]
999 for new in xrange(o, len(fl)):
1010 for new in xrange(o, len(fl)):
1000 n = fl.node(new)
1011 n = fl.node(new)
1001 if n in needs:
1012 if n in needs:
1002 needs.remove(n)
1013 needs.remove(n)
1003 else:
1014 else:
1004 raise error.Abort(
1015 raise error.Abort(
1005 _("received spurious file revlog entry"))
1016 _("received spurious file revlog entry"))
1006 if not needs:
1017 if not needs:
1007 del needfiles[f]
1018 del needfiles[f]
1008 repo.ui.progress(_('files'), None)
1019 repo.ui.progress(_('files'), None)
1009
1020
1010 for f, needs in needfiles.iteritems():
1021 for f, needs in needfiles.iteritems():
1011 fl = repo.file(f)
1022 fl = repo.file(f)
1012 for n in needs:
1023 for n in needs:
1013 try:
1024 try:
1014 fl.rev(n)
1025 fl.rev(n)
1015 except error.LookupError:
1026 except error.LookupError:
1016 raise error.Abort(
1027 raise error.Abort(
1017 _('missing file data for %s:%s - run hg verify') %
1028 _('missing file data for %s:%s - run hg verify') %
1018 (f, hex(n)))
1029 (f, hex(n)))
1019
1030
1020 return revisions, files
1031 return revisions, files
@@ -1,2000 +1,2004 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 bookmarks as bookmod,
19 bookmarks as bookmod,
20 bundle2,
20 bundle2,
21 changegroup,
21 changegroup,
22 discovery,
22 discovery,
23 error,
23 error,
24 lock as lockmod,
24 lock as lockmod,
25 obsolete,
25 obsolete,
26 phases,
26 phases,
27 pushkey,
27 pushkey,
28 scmutil,
28 scmutil,
29 sslutil,
29 sslutil,
30 streamclone,
30 streamclone,
31 url as urlmod,
31 url as urlmod,
32 util,
32 util,
33 )
33 )
34
34
35 urlerr = util.urlerr
35 urlerr = util.urlerr
36 urlreq = util.urlreq
36 urlreq = util.urlreq
37
37
38 # Maps bundle version human names to changegroup versions.
38 # Maps bundle version human names to changegroup versions.
39 _bundlespeccgversions = {'v1': '01',
39 _bundlespeccgversions = {'v1': '01',
40 'v2': '02',
40 'v2': '02',
41 'packed1': 's1',
41 'packed1': 's1',
42 'bundle2': '02', #legacy
42 'bundle2': '02', #legacy
43 }
43 }
44
44
45 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
45 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
46 _bundlespecv1compengines = set(['gzip', 'bzip2', 'none'])
46 _bundlespecv1compengines = set(['gzip', 'bzip2', 'none'])
47
47
48 def parsebundlespec(repo, spec, strict=True, externalnames=False):
48 def parsebundlespec(repo, spec, strict=True, externalnames=False):
49 """Parse a bundle string specification into parts.
49 """Parse a bundle string specification into parts.
50
50
51 Bundle specifications denote a well-defined bundle/exchange format.
51 Bundle specifications denote a well-defined bundle/exchange format.
52 The content of a given specification should not change over time in
52 The content of a given specification should not change over time in
53 order to ensure that bundles produced by a newer version of Mercurial are
53 order to ensure that bundles produced by a newer version of Mercurial are
54 readable from an older version.
54 readable from an older version.
55
55
56 The string currently has the form:
56 The string currently has the form:
57
57
58 <compression>-<type>[;<parameter0>[;<parameter1>]]
58 <compression>-<type>[;<parameter0>[;<parameter1>]]
59
59
60 Where <compression> is one of the supported compression formats
60 Where <compression> is one of the supported compression formats
61 and <type> is (currently) a version string. A ";" can follow the type and
61 and <type> is (currently) a version string. A ";" can follow the type and
62 all text afterwards is interpreted as URI encoded, ";" delimited key=value
62 all text afterwards is interpreted as URI encoded, ";" delimited key=value
63 pairs.
63 pairs.
64
64
65 If ``strict`` is True (the default) <compression> is required. Otherwise,
65 If ``strict`` is True (the default) <compression> is required. Otherwise,
66 it is optional.
66 it is optional.
67
67
68 If ``externalnames`` is False (the default), the human-centric names will
68 If ``externalnames`` is False (the default), the human-centric names will
69 be converted to their internal representation.
69 be converted to their internal representation.
70
70
71 Returns a 3-tuple of (compression, version, parameters). Compression will
71 Returns a 3-tuple of (compression, version, parameters). Compression will
72 be ``None`` if not in strict mode and a compression isn't defined.
72 be ``None`` if not in strict mode and a compression isn't defined.
73
73
74 An ``InvalidBundleSpecification`` is raised when the specification is
74 An ``InvalidBundleSpecification`` is raised when the specification is
75 not syntactically well formed.
75 not syntactically well formed.
76
76
77 An ``UnsupportedBundleSpecification`` is raised when the compression or
77 An ``UnsupportedBundleSpecification`` is raised when the compression or
78 bundle type/version is not recognized.
78 bundle type/version is not recognized.
79
79
80 Note: this function will likely eventually return a more complex data
80 Note: this function will likely eventually return a more complex data
81 structure, including bundle2 part information.
81 structure, including bundle2 part information.
82 """
82 """
83 def parseparams(s):
83 def parseparams(s):
84 if ';' not in s:
84 if ';' not in s:
85 return s, {}
85 return s, {}
86
86
87 params = {}
87 params = {}
88 version, paramstr = s.split(';', 1)
88 version, paramstr = s.split(';', 1)
89
89
90 for p in paramstr.split(';'):
90 for p in paramstr.split(';'):
91 if '=' not in p:
91 if '=' not in p:
92 raise error.InvalidBundleSpecification(
92 raise error.InvalidBundleSpecification(
93 _('invalid bundle specification: '
93 _('invalid bundle specification: '
94 'missing "=" in parameter: %s') % p)
94 'missing "=" in parameter: %s') % p)
95
95
96 key, value = p.split('=', 1)
96 key, value = p.split('=', 1)
97 key = urlreq.unquote(key)
97 key = urlreq.unquote(key)
98 value = urlreq.unquote(value)
98 value = urlreq.unquote(value)
99 params[key] = value
99 params[key] = value
100
100
101 return version, params
101 return version, params
102
102
103
103
104 if strict and '-' not in spec:
104 if strict and '-' not in spec:
105 raise error.InvalidBundleSpecification(
105 raise error.InvalidBundleSpecification(
106 _('invalid bundle specification; '
106 _('invalid bundle specification; '
107 'must be prefixed with compression: %s') % spec)
107 'must be prefixed with compression: %s') % spec)
108
108
109 if '-' in spec:
109 if '-' in spec:
110 compression, version = spec.split('-', 1)
110 compression, version = spec.split('-', 1)
111
111
112 if compression not in util.compengines.supportedbundlenames:
112 if compression not in util.compengines.supportedbundlenames:
113 raise error.UnsupportedBundleSpecification(
113 raise error.UnsupportedBundleSpecification(
114 _('%s compression is not supported') % compression)
114 _('%s compression is not supported') % compression)
115
115
116 version, params = parseparams(version)
116 version, params = parseparams(version)
117
117
118 if version not in _bundlespeccgversions:
118 if version not in _bundlespeccgversions:
119 raise error.UnsupportedBundleSpecification(
119 raise error.UnsupportedBundleSpecification(
120 _('%s is not a recognized bundle version') % version)
120 _('%s is not a recognized bundle version') % version)
121 else:
121 else:
122 # Value could be just the compression or just the version, in which
122 # Value could be just the compression or just the version, in which
123 # case some defaults are assumed (but only when not in strict mode).
123 # case some defaults are assumed (but only when not in strict mode).
124 assert not strict
124 assert not strict
125
125
126 spec, params = parseparams(spec)
126 spec, params = parseparams(spec)
127
127
128 if spec in util.compengines.supportedbundlenames:
128 if spec in util.compengines.supportedbundlenames:
129 compression = spec
129 compression = spec
130 version = 'v1'
130 version = 'v1'
131 # Generaldelta repos require v2.
131 # Generaldelta repos require v2.
132 if 'generaldelta' in repo.requirements:
132 if 'generaldelta' in repo.requirements:
133 version = 'v2'
133 version = 'v2'
134 # Modern compression engines require v2.
134 # Modern compression engines require v2.
135 if compression not in _bundlespecv1compengines:
135 if compression not in _bundlespecv1compengines:
136 version = 'v2'
136 version = 'v2'
137 elif spec in _bundlespeccgversions:
137 elif spec in _bundlespeccgversions:
138 if spec == 'packed1':
138 if spec == 'packed1':
139 compression = 'none'
139 compression = 'none'
140 else:
140 else:
141 compression = 'bzip2'
141 compression = 'bzip2'
142 version = spec
142 version = spec
143 else:
143 else:
144 raise error.UnsupportedBundleSpecification(
144 raise error.UnsupportedBundleSpecification(
145 _('%s is not a recognized bundle specification') % spec)
145 _('%s is not a recognized bundle specification') % spec)
146
146
147 # Bundle version 1 only supports a known set of compression engines.
147 # Bundle version 1 only supports a known set of compression engines.
148 if version == 'v1' and compression not in _bundlespecv1compengines:
148 if version == 'v1' and compression not in _bundlespecv1compengines:
149 raise error.UnsupportedBundleSpecification(
149 raise error.UnsupportedBundleSpecification(
150 _('compression engine %s is not supported on v1 bundles') %
150 _('compression engine %s is not supported on v1 bundles') %
151 compression)
151 compression)
152
152
153 # The specification for packed1 can optionally declare the data formats
153 # The specification for packed1 can optionally declare the data formats
154 # required to apply it. If we see this metadata, compare against what the
154 # required to apply it. If we see this metadata, compare against what the
155 # repo supports and error if the bundle isn't compatible.
155 # repo supports and error if the bundle isn't compatible.
156 if version == 'packed1' and 'requirements' in params:
156 if version == 'packed1' and 'requirements' in params:
157 requirements = set(params['requirements'].split(','))
157 requirements = set(params['requirements'].split(','))
158 missingreqs = requirements - repo.supportedformats
158 missingreqs = requirements - repo.supportedformats
159 if missingreqs:
159 if missingreqs:
160 raise error.UnsupportedBundleSpecification(
160 raise error.UnsupportedBundleSpecification(
161 _('missing support for repository features: %s') %
161 _('missing support for repository features: %s') %
162 ', '.join(sorted(missingreqs)))
162 ', '.join(sorted(missingreqs)))
163
163
164 if not externalnames:
164 if not externalnames:
165 engine = util.compengines.forbundlename(compression)
165 engine = util.compengines.forbundlename(compression)
166 compression = engine.bundletype()[1]
166 compression = engine.bundletype()[1]
167 version = _bundlespeccgversions[version]
167 version = _bundlespeccgversions[version]
168 return compression, version, params
168 return compression, version, params
169
169
170 def readbundle(ui, fh, fname, vfs=None):
170 def readbundle(ui, fh, fname, vfs=None):
171 header = changegroup.readexactly(fh, 4)
171 header = changegroup.readexactly(fh, 4)
172
172
173 alg = None
173 alg = None
174 if not fname:
174 if not fname:
175 fname = "stream"
175 fname = "stream"
176 if not header.startswith('HG') and header.startswith('\0'):
176 if not header.startswith('HG') and header.startswith('\0'):
177 fh = changegroup.headerlessfixup(fh, header)
177 fh = changegroup.headerlessfixup(fh, header)
178 header = "HG10"
178 header = "HG10"
179 alg = 'UN'
179 alg = 'UN'
180 elif vfs:
180 elif vfs:
181 fname = vfs.join(fname)
181 fname = vfs.join(fname)
182
182
183 magic, version = header[0:2], header[2:4]
183 magic, version = header[0:2], header[2:4]
184
184
185 if magic != 'HG':
185 if magic != 'HG':
186 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
186 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
187 if version == '10':
187 if version == '10':
188 if alg is None:
188 if alg is None:
189 alg = changegroup.readexactly(fh, 2)
189 alg = changegroup.readexactly(fh, 2)
190 return changegroup.cg1unpacker(fh, alg)
190 return changegroup.cg1unpacker(fh, alg)
191 elif version.startswith('2'):
191 elif version.startswith('2'):
192 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
192 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
193 elif version == 'S1':
193 elif version == 'S1':
194 return streamclone.streamcloneapplier(fh)
194 return streamclone.streamcloneapplier(fh)
195 else:
195 else:
196 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
196 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
197
197
198 def getbundlespec(ui, fh):
198 def getbundlespec(ui, fh):
199 """Infer the bundlespec from a bundle file handle.
199 """Infer the bundlespec from a bundle file handle.
200
200
201 The input file handle is seeked and the original seek position is not
201 The input file handle is seeked and the original seek position is not
202 restored.
202 restored.
203 """
203 """
204 def speccompression(alg):
204 def speccompression(alg):
205 try:
205 try:
206 return util.compengines.forbundletype(alg).bundletype()[0]
206 return util.compengines.forbundletype(alg).bundletype()[0]
207 except KeyError:
207 except KeyError:
208 return None
208 return None
209
209
210 b = readbundle(ui, fh, None)
210 b = readbundle(ui, fh, None)
211 if isinstance(b, changegroup.cg1unpacker):
211 if isinstance(b, changegroup.cg1unpacker):
212 alg = b._type
212 alg = b._type
213 if alg == '_truncatedBZ':
213 if alg == '_truncatedBZ':
214 alg = 'BZ'
214 alg = 'BZ'
215 comp = speccompression(alg)
215 comp = speccompression(alg)
216 if not comp:
216 if not comp:
217 raise error.Abort(_('unknown compression algorithm: %s') % alg)
217 raise error.Abort(_('unknown compression algorithm: %s') % alg)
218 return '%s-v1' % comp
218 return '%s-v1' % comp
219 elif isinstance(b, bundle2.unbundle20):
219 elif isinstance(b, bundle2.unbundle20):
220 if 'Compression' in b.params:
220 if 'Compression' in b.params:
221 comp = speccompression(b.params['Compression'])
221 comp = speccompression(b.params['Compression'])
222 if not comp:
222 if not comp:
223 raise error.Abort(_('unknown compression algorithm: %s') % comp)
223 raise error.Abort(_('unknown compression algorithm: %s') % comp)
224 else:
224 else:
225 comp = 'none'
225 comp = 'none'
226
226
227 version = None
227 version = None
228 for part in b.iterparts():
228 for part in b.iterparts():
229 if part.type == 'changegroup':
229 if part.type == 'changegroup':
230 version = part.params['version']
230 version = part.params['version']
231 if version in ('01', '02'):
231 if version in ('01', '02'):
232 version = 'v2'
232 version = 'v2'
233 else:
233 else:
234 raise error.Abort(_('changegroup version %s does not have '
234 raise error.Abort(_('changegroup version %s does not have '
235 'a known bundlespec') % version,
235 'a known bundlespec') % version,
236 hint=_('try upgrading your Mercurial '
236 hint=_('try upgrading your Mercurial '
237 'client'))
237 'client'))
238
238
239 if not version:
239 if not version:
240 raise error.Abort(_('could not identify changegroup version in '
240 raise error.Abort(_('could not identify changegroup version in '
241 'bundle'))
241 'bundle'))
242
242
243 return '%s-%s' % (comp, version)
243 return '%s-%s' % (comp, version)
244 elif isinstance(b, streamclone.streamcloneapplier):
244 elif isinstance(b, streamclone.streamcloneapplier):
245 requirements = streamclone.readbundle1header(fh)[2]
245 requirements = streamclone.readbundle1header(fh)[2]
246 params = 'requirements=%s' % ','.join(sorted(requirements))
246 params = 'requirements=%s' % ','.join(sorted(requirements))
247 return 'none-packed1;%s' % urlreq.quote(params)
247 return 'none-packed1;%s' % urlreq.quote(params)
248 else:
248 else:
249 raise error.Abort(_('unknown bundle type: %s') % b)
249 raise error.Abort(_('unknown bundle type: %s') % b)
250
250
251 def buildobsmarkerspart(bundler, markers):
251 def buildobsmarkerspart(bundler, markers):
252 """add an obsmarker part to the bundler with <markers>
252 """add an obsmarker part to the bundler with <markers>
253
253
254 No part is created if markers is empty.
254 No part is created if markers is empty.
255 Raises ValueError if the bundler doesn't support any known obsmarker format.
255 Raises ValueError if the bundler doesn't support any known obsmarker format.
256 """
256 """
257 if markers:
257 if markers:
258 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
258 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
259 version = obsolete.commonversion(remoteversions)
259 version = obsolete.commonversion(remoteversions)
260 if version is None:
260 if version is None:
261 raise ValueError('bundler does not support common obsmarker format')
261 raise ValueError('bundler does not support common obsmarker format')
262 stream = obsolete.encodemarkers(markers, True, version=version)
262 stream = obsolete.encodemarkers(markers, True, version=version)
263 return bundler.newpart('obsmarkers', data=stream)
263 return bundler.newpart('obsmarkers', data=stream)
264 return None
264 return None
265
265
266 def _computeoutgoing(repo, heads, common):
266 def _computeoutgoing(repo, heads, common):
267 """Computes which revs are outgoing given a set of common
267 """Computes which revs are outgoing given a set of common
268 and a set of heads.
268 and a set of heads.
269
269
270 This is a separate function so extensions can have access to
270 This is a separate function so extensions can have access to
271 the logic.
271 the logic.
272
272
273 Returns a discovery.outgoing object.
273 Returns a discovery.outgoing object.
274 """
274 """
275 cl = repo.changelog
275 cl = repo.changelog
276 if common:
276 if common:
277 hasnode = cl.hasnode
277 hasnode = cl.hasnode
278 common = [n for n in common if hasnode(n)]
278 common = [n for n in common if hasnode(n)]
279 else:
279 else:
280 common = [nullid]
280 common = [nullid]
281 if not heads:
281 if not heads:
282 heads = cl.heads()
282 heads = cl.heads()
283 return discovery.outgoing(repo, common, heads)
283 return discovery.outgoing(repo, common, heads)
284
284
285 def _forcebundle1(op):
285 def _forcebundle1(op):
286 """return true if a pull/push must use bundle1
286 """return true if a pull/push must use bundle1
287
287
288 This function is used to allow testing of the older bundle version"""
288 This function is used to allow testing of the older bundle version"""
289 ui = op.repo.ui
289 ui = op.repo.ui
290 forcebundle1 = False
290 forcebundle1 = False
291 # The goal is this config is to allow developer to choose the bundle
291 # The goal is this config is to allow developer to choose the bundle
292 # version used during exchanged. This is especially handy during test.
292 # version used during exchanged. This is especially handy during test.
293 # Value is a list of bundle version to be picked from, highest version
293 # Value is a list of bundle version to be picked from, highest version
294 # should be used.
294 # should be used.
295 #
295 #
296 # developer config: devel.legacy.exchange
296 # developer config: devel.legacy.exchange
297 exchange = ui.configlist('devel', 'legacy.exchange')
297 exchange = ui.configlist('devel', 'legacy.exchange')
298 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
298 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
299 return forcebundle1 or not op.remote.capable('bundle2')
299 return forcebundle1 or not op.remote.capable('bundle2')
300
300
301 class pushoperation(object):
301 class pushoperation(object):
302 """A object that represent a single push operation
302 """A object that represent a single push operation
303
303
304 Its purpose is to carry push related state and very common operations.
304 Its purpose is to carry push related state and very common operations.
305
305
306 A new pushoperation should be created at the beginning of each push and
306 A new pushoperation should be created at the beginning of each push and
307 discarded afterward.
307 discarded afterward.
308 """
308 """
309
309
310 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
310 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
311 bookmarks=()):
311 bookmarks=()):
312 # repo we push from
312 # repo we push from
313 self.repo = repo
313 self.repo = repo
314 self.ui = repo.ui
314 self.ui = repo.ui
315 # repo we push to
315 # repo we push to
316 self.remote = remote
316 self.remote = remote
317 # force option provided
317 # force option provided
318 self.force = force
318 self.force = force
319 # revs to be pushed (None is "all")
319 # revs to be pushed (None is "all")
320 self.revs = revs
320 self.revs = revs
321 # bookmark explicitly pushed
321 # bookmark explicitly pushed
322 self.bookmarks = bookmarks
322 self.bookmarks = bookmarks
323 # allow push of new branch
323 # allow push of new branch
324 self.newbranch = newbranch
324 self.newbranch = newbranch
325 # did a local lock get acquired?
325 # did a local lock get acquired?
326 self.locallocked = None
326 self.locallocked = None
327 # step already performed
327 # step already performed
328 # (used to check what steps have been already performed through bundle2)
328 # (used to check what steps have been already performed through bundle2)
329 self.stepsdone = set()
329 self.stepsdone = set()
330 # Integer version of the changegroup push result
330 # Integer version of the changegroup push result
331 # - None means nothing to push
331 # - None means nothing to push
332 # - 0 means HTTP error
332 # - 0 means HTTP error
333 # - 1 means we pushed and remote head count is unchanged *or*
333 # - 1 means we pushed and remote head count is unchanged *or*
334 # we have outgoing changesets but refused to push
334 # we have outgoing changesets but refused to push
335 # - other values as described by addchangegroup()
335 # - other values as described by addchangegroup()
336 self.cgresult = None
336 self.cgresult = None
337 # Boolean value for the bookmark push
337 # Boolean value for the bookmark push
338 self.bkresult = None
338 self.bkresult = None
339 # discover.outgoing object (contains common and outgoing data)
339 # discover.outgoing object (contains common and outgoing data)
340 self.outgoing = None
340 self.outgoing = None
341 # all remote heads before the push
341 # all remote heads before the push
342 self.remoteheads = None
342 self.remoteheads = None
343 # testable as a boolean indicating if any nodes are missing locally.
343 # testable as a boolean indicating if any nodes are missing locally.
344 self.incoming = None
344 self.incoming = None
345 # phases changes that must be pushed along side the changesets
345 # phases changes that must be pushed along side the changesets
346 self.outdatedphases = None
346 self.outdatedphases = None
347 # phases changes that must be pushed if changeset push fails
347 # phases changes that must be pushed if changeset push fails
348 self.fallbackoutdatedphases = None
348 self.fallbackoutdatedphases = None
349 # outgoing obsmarkers
349 # outgoing obsmarkers
350 self.outobsmarkers = set()
350 self.outobsmarkers = set()
351 # outgoing bookmarks
351 # outgoing bookmarks
352 self.outbookmarks = []
352 self.outbookmarks = []
353 # transaction manager
353 # transaction manager
354 self.trmanager = None
354 self.trmanager = None
355 # map { pushkey partid -> callback handling failure}
355 # map { pushkey partid -> callback handling failure}
356 # used to handle exception from mandatory pushkey part failure
356 # used to handle exception from mandatory pushkey part failure
357 self.pkfailcb = {}
357 self.pkfailcb = {}
358
358
359 @util.propertycache
359 @util.propertycache
360 def futureheads(self):
360 def futureheads(self):
361 """future remote heads if the changeset push succeeds"""
361 """future remote heads if the changeset push succeeds"""
362 return self.outgoing.missingheads
362 return self.outgoing.missingheads
363
363
364 @util.propertycache
364 @util.propertycache
365 def fallbackheads(self):
365 def fallbackheads(self):
366 """future remote heads if the changeset push fails"""
366 """future remote heads if the changeset push fails"""
367 if self.revs is None:
367 if self.revs is None:
368 # not target to push, all common are relevant
368 # not target to push, all common are relevant
369 return self.outgoing.commonheads
369 return self.outgoing.commonheads
370 unfi = self.repo.unfiltered()
370 unfi = self.repo.unfiltered()
371 # I want cheads = heads(::missingheads and ::commonheads)
371 # I want cheads = heads(::missingheads and ::commonheads)
372 # (missingheads is revs with secret changeset filtered out)
372 # (missingheads is revs with secret changeset filtered out)
373 #
373 #
374 # This can be expressed as:
374 # This can be expressed as:
375 # cheads = ( (missingheads and ::commonheads)
375 # cheads = ( (missingheads and ::commonheads)
376 # + (commonheads and ::missingheads))"
376 # + (commonheads and ::missingheads))"
377 # )
377 # )
378 #
378 #
379 # while trying to push we already computed the following:
379 # while trying to push we already computed the following:
380 # common = (::commonheads)
380 # common = (::commonheads)
381 # missing = ((commonheads::missingheads) - commonheads)
381 # missing = ((commonheads::missingheads) - commonheads)
382 #
382 #
383 # We can pick:
383 # We can pick:
384 # * missingheads part of common (::commonheads)
384 # * missingheads part of common (::commonheads)
385 common = self.outgoing.common
385 common = self.outgoing.common
386 nm = self.repo.changelog.nodemap
386 nm = self.repo.changelog.nodemap
387 cheads = [node for node in self.revs if nm[node] in common]
387 cheads = [node for node in self.revs if nm[node] in common]
388 # and
388 # and
389 # * commonheads parents on missing
389 # * commonheads parents on missing
390 revset = unfi.set('%ln and parents(roots(%ln))',
390 revset = unfi.set('%ln and parents(roots(%ln))',
391 self.outgoing.commonheads,
391 self.outgoing.commonheads,
392 self.outgoing.missing)
392 self.outgoing.missing)
393 cheads.extend(c.node() for c in revset)
393 cheads.extend(c.node() for c in revset)
394 return cheads
394 return cheads
395
395
396 @property
396 @property
397 def commonheads(self):
397 def commonheads(self):
398 """set of all common heads after changeset bundle push"""
398 """set of all common heads after changeset bundle push"""
399 if self.cgresult:
399 if self.cgresult:
400 return self.futureheads
400 return self.futureheads
401 else:
401 else:
402 return self.fallbackheads
402 return self.fallbackheads
403
403
404 # mapping of message used when pushing bookmark
404 # mapping of message used when pushing bookmark
405 bookmsgmap = {'update': (_("updating bookmark %s\n"),
405 bookmsgmap = {'update': (_("updating bookmark %s\n"),
406 _('updating bookmark %s failed!\n')),
406 _('updating bookmark %s failed!\n')),
407 'export': (_("exporting bookmark %s\n"),
407 'export': (_("exporting bookmark %s\n"),
408 _('exporting bookmark %s failed!\n')),
408 _('exporting bookmark %s failed!\n')),
409 'delete': (_("deleting remote bookmark %s\n"),
409 'delete': (_("deleting remote bookmark %s\n"),
410 _('deleting remote bookmark %s failed!\n')),
410 _('deleting remote bookmark %s failed!\n')),
411 }
411 }
412
412
413
413
414 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
414 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
415 opargs=None):
415 opargs=None):
416 '''Push outgoing changesets (limited by revs) from a local
416 '''Push outgoing changesets (limited by revs) from a local
417 repository to remote. Return an integer:
417 repository to remote. Return an integer:
418 - None means nothing to push
418 - None means nothing to push
419 - 0 means HTTP error
419 - 0 means HTTP error
420 - 1 means we pushed and remote head count is unchanged *or*
420 - 1 means we pushed and remote head count is unchanged *or*
421 we have outgoing changesets but refused to push
421 we have outgoing changesets but refused to push
422 - other values as described by addchangegroup()
422 - other values as described by addchangegroup()
423 '''
423 '''
424 if opargs is None:
424 if opargs is None:
425 opargs = {}
425 opargs = {}
426 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
426 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
427 **opargs)
427 **opargs)
428 if pushop.remote.local():
428 if pushop.remote.local():
429 missing = (set(pushop.repo.requirements)
429 missing = (set(pushop.repo.requirements)
430 - pushop.remote.local().supported)
430 - pushop.remote.local().supported)
431 if missing:
431 if missing:
432 msg = _("required features are not"
432 msg = _("required features are not"
433 " supported in the destination:"
433 " supported in the destination:"
434 " %s") % (', '.join(sorted(missing)))
434 " %s") % (', '.join(sorted(missing)))
435 raise error.Abort(msg)
435 raise error.Abort(msg)
436
436
437 # there are two ways to push to remote repo:
437 # there are two ways to push to remote repo:
438 #
438 #
439 # addchangegroup assumes local user can lock remote
439 # addchangegroup assumes local user can lock remote
440 # repo (local filesystem, old ssh servers).
440 # repo (local filesystem, old ssh servers).
441 #
441 #
442 # unbundle assumes local user cannot lock remote repo (new ssh
442 # unbundle assumes local user cannot lock remote repo (new ssh
443 # servers, http servers).
443 # servers, http servers).
444
444
445 if not pushop.remote.canpush():
445 if not pushop.remote.canpush():
446 raise error.Abort(_("destination does not support push"))
446 raise error.Abort(_("destination does not support push"))
447 # get local lock as we might write phase data
447 # get local lock as we might write phase data
448 localwlock = locallock = None
448 localwlock = locallock = None
449 try:
449 try:
450 # bundle2 push may receive a reply bundle touching bookmarks or other
450 # bundle2 push may receive a reply bundle touching bookmarks or other
451 # things requiring the wlock. Take it now to ensure proper ordering.
451 # things requiring the wlock. Take it now to ensure proper ordering.
452 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
452 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
453 if (not _forcebundle1(pushop)) and maypushback:
453 if (not _forcebundle1(pushop)) and maypushback:
454 localwlock = pushop.repo.wlock()
454 localwlock = pushop.repo.wlock()
455 locallock = pushop.repo.lock()
455 locallock = pushop.repo.lock()
456 pushop.locallocked = True
456 pushop.locallocked = True
457 except IOError as err:
457 except IOError as err:
458 pushop.locallocked = False
458 pushop.locallocked = False
459 if err.errno != errno.EACCES:
459 if err.errno != errno.EACCES:
460 raise
460 raise
461 # source repo cannot be locked.
461 # source repo cannot be locked.
462 # We do not abort the push, but just disable the local phase
462 # We do not abort the push, but just disable the local phase
463 # synchronisation.
463 # synchronisation.
464 msg = 'cannot lock source repository: %s\n' % err
464 msg = 'cannot lock source repository: %s\n' % err
465 pushop.ui.debug(msg)
465 pushop.ui.debug(msg)
466 try:
466 try:
467 if pushop.locallocked:
467 if pushop.locallocked:
468 pushop.trmanager = transactionmanager(pushop.repo,
468 pushop.trmanager = transactionmanager(pushop.repo,
469 'push-response',
469 'push-response',
470 pushop.remote.url())
470 pushop.remote.url())
471 pushop.repo.checkpush(pushop)
471 pushop.repo.checkpush(pushop)
472 lock = None
472 lock = None
473 unbundle = pushop.remote.capable('unbundle')
473 unbundle = pushop.remote.capable('unbundle')
474 if not unbundle:
474 if not unbundle:
475 lock = pushop.remote.lock()
475 lock = pushop.remote.lock()
476 try:
476 try:
477 _pushdiscovery(pushop)
477 _pushdiscovery(pushop)
478 if not _forcebundle1(pushop):
478 if not _forcebundle1(pushop):
479 _pushbundle2(pushop)
479 _pushbundle2(pushop)
480 _pushchangeset(pushop)
480 _pushchangeset(pushop)
481 _pushsyncphase(pushop)
481 _pushsyncphase(pushop)
482 _pushobsolete(pushop)
482 _pushobsolete(pushop)
483 _pushbookmark(pushop)
483 _pushbookmark(pushop)
484 finally:
484 finally:
485 if lock is not None:
485 if lock is not None:
486 lock.release()
486 lock.release()
487 if pushop.trmanager:
487 if pushop.trmanager:
488 pushop.trmanager.close()
488 pushop.trmanager.close()
489 finally:
489 finally:
490 if pushop.trmanager:
490 if pushop.trmanager:
491 pushop.trmanager.release()
491 pushop.trmanager.release()
492 if locallock is not None:
492 if locallock is not None:
493 locallock.release()
493 locallock.release()
494 if localwlock is not None:
494 if localwlock is not None:
495 localwlock.release()
495 localwlock.release()
496
496
497 return pushop
497 return pushop
498
498
499 # list of steps to perform discovery before push
499 # list of steps to perform discovery before push
500 pushdiscoveryorder = []
500 pushdiscoveryorder = []
501
501
502 # Mapping between step name and function
502 # Mapping between step name and function
503 #
503 #
504 # This exists to help extensions wrap steps if necessary
504 # This exists to help extensions wrap steps if necessary
505 pushdiscoverymapping = {}
505 pushdiscoverymapping = {}
506
506
507 def pushdiscovery(stepname):
507 def pushdiscovery(stepname):
508 """decorator for function performing discovery before push
508 """decorator for function performing discovery before push
509
509
510 The function is added to the step -> function mapping and appended to the
510 The function is added to the step -> function mapping and appended to the
511 list of steps. Beware that decorated function will be added in order (this
511 list of steps. Beware that decorated function will be added in order (this
512 may matter).
512 may matter).
513
513
514 You can only use this decorator for a new step, if you want to wrap a step
514 You can only use this decorator for a new step, if you want to wrap a step
515 from an extension, change the pushdiscovery dictionary directly."""
515 from an extension, change the pushdiscovery dictionary directly."""
516 def dec(func):
516 def dec(func):
517 assert stepname not in pushdiscoverymapping
517 assert stepname not in pushdiscoverymapping
518 pushdiscoverymapping[stepname] = func
518 pushdiscoverymapping[stepname] = func
519 pushdiscoveryorder.append(stepname)
519 pushdiscoveryorder.append(stepname)
520 return func
520 return func
521 return dec
521 return dec
522
522
523 def _pushdiscovery(pushop):
523 def _pushdiscovery(pushop):
524 """Run all discovery steps"""
524 """Run all discovery steps"""
525 for stepname in pushdiscoveryorder:
525 for stepname in pushdiscoveryorder:
526 step = pushdiscoverymapping[stepname]
526 step = pushdiscoverymapping[stepname]
527 step(pushop)
527 step(pushop)
528
528
529 @pushdiscovery('changeset')
529 @pushdiscovery('changeset')
530 def _pushdiscoverychangeset(pushop):
530 def _pushdiscoverychangeset(pushop):
531 """discover the changeset that need to be pushed"""
531 """discover the changeset that need to be pushed"""
532 fci = discovery.findcommonincoming
532 fci = discovery.findcommonincoming
533 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
533 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
534 common, inc, remoteheads = commoninc
534 common, inc, remoteheads = commoninc
535 fco = discovery.findcommonoutgoing
535 fco = discovery.findcommonoutgoing
536 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
536 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
537 commoninc=commoninc, force=pushop.force)
537 commoninc=commoninc, force=pushop.force)
538 pushop.outgoing = outgoing
538 pushop.outgoing = outgoing
539 pushop.remoteheads = remoteheads
539 pushop.remoteheads = remoteheads
540 pushop.incoming = inc
540 pushop.incoming = inc
541
541
542 @pushdiscovery('phase')
542 @pushdiscovery('phase')
543 def _pushdiscoveryphase(pushop):
543 def _pushdiscoveryphase(pushop):
544 """discover the phase that needs to be pushed
544 """discover the phase that needs to be pushed
545
545
546 (computed for both success and failure case for changesets push)"""
546 (computed for both success and failure case for changesets push)"""
547 outgoing = pushop.outgoing
547 outgoing = pushop.outgoing
548 unfi = pushop.repo.unfiltered()
548 unfi = pushop.repo.unfiltered()
549 remotephases = pushop.remote.listkeys('phases')
549 remotephases = pushop.remote.listkeys('phases')
550 publishing = remotephases.get('publishing', False)
550 publishing = remotephases.get('publishing', False)
551 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
551 if (pushop.ui.configbool('ui', '_usedassubrepo', False)
552 and remotephases # server supports phases
552 and remotephases # server supports phases
553 and not pushop.outgoing.missing # no changesets to be pushed
553 and not pushop.outgoing.missing # no changesets to be pushed
554 and publishing):
554 and publishing):
555 # When:
555 # When:
556 # - this is a subrepo push
556 # - this is a subrepo push
557 # - and remote support phase
557 # - and remote support phase
558 # - and no changeset are to be pushed
558 # - and no changeset are to be pushed
559 # - and remote is publishing
559 # - and remote is publishing
560 # We may be in issue 3871 case!
560 # We may be in issue 3871 case!
561 # We drop the possible phase synchronisation done by
561 # We drop the possible phase synchronisation done by
562 # courtesy to publish changesets possibly locally draft
562 # courtesy to publish changesets possibly locally draft
563 # on the remote.
563 # on the remote.
564 remotephases = {'publishing': 'True'}
564 remotephases = {'publishing': 'True'}
565 ana = phases.analyzeremotephases(pushop.repo,
565 ana = phases.analyzeremotephases(pushop.repo,
566 pushop.fallbackheads,
566 pushop.fallbackheads,
567 remotephases)
567 remotephases)
568 pheads, droots = ana
568 pheads, droots = ana
569 extracond = ''
569 extracond = ''
570 if not publishing:
570 if not publishing:
571 extracond = ' and public()'
571 extracond = ' and public()'
572 revset = 'heads((%%ln::%%ln) %s)' % extracond
572 revset = 'heads((%%ln::%%ln) %s)' % extracond
573 # Get the list of all revs draft on remote by public here.
573 # Get the list of all revs draft on remote by public here.
574 # XXX Beware that revset break if droots is not strictly
574 # XXX Beware that revset break if droots is not strictly
575 # XXX root we may want to ensure it is but it is costly
575 # XXX root we may want to ensure it is but it is costly
576 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
576 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
577 if not outgoing.missing:
577 if not outgoing.missing:
578 future = fallback
578 future = fallback
579 else:
579 else:
580 # adds changeset we are going to push as draft
580 # adds changeset we are going to push as draft
581 #
581 #
582 # should not be necessary for publishing server, but because of an
582 # should not be necessary for publishing server, but because of an
583 # issue fixed in xxxxx we have to do it anyway.
583 # issue fixed in xxxxx we have to do it anyway.
584 fdroots = list(unfi.set('roots(%ln + %ln::)',
584 fdroots = list(unfi.set('roots(%ln + %ln::)',
585 outgoing.missing, droots))
585 outgoing.missing, droots))
586 fdroots = [f.node() for f in fdroots]
586 fdroots = [f.node() for f in fdroots]
587 future = list(unfi.set(revset, fdroots, pushop.futureheads))
587 future = list(unfi.set(revset, fdroots, pushop.futureheads))
588 pushop.outdatedphases = future
588 pushop.outdatedphases = future
589 pushop.fallbackoutdatedphases = fallback
589 pushop.fallbackoutdatedphases = fallback
590
590
591 @pushdiscovery('obsmarker')
591 @pushdiscovery('obsmarker')
592 def _pushdiscoveryobsmarkers(pushop):
592 def _pushdiscoveryobsmarkers(pushop):
593 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
593 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
594 and pushop.repo.obsstore
594 and pushop.repo.obsstore
595 and 'obsolete' in pushop.remote.listkeys('namespaces')):
595 and 'obsolete' in pushop.remote.listkeys('namespaces')):
596 repo = pushop.repo
596 repo = pushop.repo
597 # very naive computation, that can be quite expensive on big repo.
597 # very naive computation, that can be quite expensive on big repo.
598 # However: evolution is currently slow on them anyway.
598 # However: evolution is currently slow on them anyway.
599 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
599 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
600 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
600 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
601
601
602 @pushdiscovery('bookmarks')
602 @pushdiscovery('bookmarks')
603 def _pushdiscoverybookmarks(pushop):
603 def _pushdiscoverybookmarks(pushop):
604 ui = pushop.ui
604 ui = pushop.ui
605 repo = pushop.repo.unfiltered()
605 repo = pushop.repo.unfiltered()
606 remote = pushop.remote
606 remote = pushop.remote
607 ui.debug("checking for updated bookmarks\n")
607 ui.debug("checking for updated bookmarks\n")
608 ancestors = ()
608 ancestors = ()
609 if pushop.revs:
609 if pushop.revs:
610 revnums = map(repo.changelog.rev, pushop.revs)
610 revnums = map(repo.changelog.rev, pushop.revs)
611 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
611 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
612 remotebookmark = remote.listkeys('bookmarks')
612 remotebookmark = remote.listkeys('bookmarks')
613
613
614 explicit = set([repo._bookmarks.expandname(bookmark)
614 explicit = set([repo._bookmarks.expandname(bookmark)
615 for bookmark in pushop.bookmarks])
615 for bookmark in pushop.bookmarks])
616
616
617 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
617 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
618 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
618 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
619
619
620 def safehex(x):
620 def safehex(x):
621 if x is None:
621 if x is None:
622 return x
622 return x
623 return hex(x)
623 return hex(x)
624
624
625 def hexifycompbookmarks(bookmarks):
625 def hexifycompbookmarks(bookmarks):
626 for b, scid, dcid in bookmarks:
626 for b, scid, dcid in bookmarks:
627 yield b, safehex(scid), safehex(dcid)
627 yield b, safehex(scid), safehex(dcid)
628
628
629 comp = [hexifycompbookmarks(marks) for marks in comp]
629 comp = [hexifycompbookmarks(marks) for marks in comp]
630 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
630 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
631
631
632 for b, scid, dcid in advsrc:
632 for b, scid, dcid in advsrc:
633 if b in explicit:
633 if b in explicit:
634 explicit.remove(b)
634 explicit.remove(b)
635 if not ancestors or repo[scid].rev() in ancestors:
635 if not ancestors or repo[scid].rev() in ancestors:
636 pushop.outbookmarks.append((b, dcid, scid))
636 pushop.outbookmarks.append((b, dcid, scid))
637 # search added bookmark
637 # search added bookmark
638 for b, scid, dcid in addsrc:
638 for b, scid, dcid in addsrc:
639 if b in explicit:
639 if b in explicit:
640 explicit.remove(b)
640 explicit.remove(b)
641 pushop.outbookmarks.append((b, '', scid))
641 pushop.outbookmarks.append((b, '', scid))
642 # search for overwritten bookmark
642 # search for overwritten bookmark
643 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
643 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
644 if b in explicit:
644 if b in explicit:
645 explicit.remove(b)
645 explicit.remove(b)
646 pushop.outbookmarks.append((b, dcid, scid))
646 pushop.outbookmarks.append((b, dcid, scid))
647 # search for bookmark to delete
647 # search for bookmark to delete
648 for b, scid, dcid in adddst:
648 for b, scid, dcid in adddst:
649 if b in explicit:
649 if b in explicit:
650 explicit.remove(b)
650 explicit.remove(b)
651 # treat as "deleted locally"
651 # treat as "deleted locally"
652 pushop.outbookmarks.append((b, dcid, ''))
652 pushop.outbookmarks.append((b, dcid, ''))
653 # identical bookmarks shouldn't get reported
653 # identical bookmarks shouldn't get reported
654 for b, scid, dcid in same:
654 for b, scid, dcid in same:
655 if b in explicit:
655 if b in explicit:
656 explicit.remove(b)
656 explicit.remove(b)
657
657
658 if explicit:
658 if explicit:
659 explicit = sorted(explicit)
659 explicit = sorted(explicit)
660 # we should probably list all of them
660 # we should probably list all of them
661 ui.warn(_('bookmark %s does not exist on the local '
661 ui.warn(_('bookmark %s does not exist on the local '
662 'or remote repository!\n') % explicit[0])
662 'or remote repository!\n') % explicit[0])
663 pushop.bkresult = 2
663 pushop.bkresult = 2
664
664
665 pushop.outbookmarks.sort()
665 pushop.outbookmarks.sort()
666
666
667 def _pushcheckoutgoing(pushop):
667 def _pushcheckoutgoing(pushop):
668 outgoing = pushop.outgoing
668 outgoing = pushop.outgoing
669 unfi = pushop.repo.unfiltered()
669 unfi = pushop.repo.unfiltered()
670 if not outgoing.missing:
670 if not outgoing.missing:
671 # nothing to push
671 # nothing to push
672 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
672 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
673 return False
673 return False
674 # something to push
674 # something to push
675 if not pushop.force:
675 if not pushop.force:
676 # if repo.obsstore == False --> no obsolete
676 # if repo.obsstore == False --> no obsolete
677 # then, save the iteration
677 # then, save the iteration
678 if unfi.obsstore:
678 if unfi.obsstore:
679 # this message are here for 80 char limit reason
679 # this message are here for 80 char limit reason
680 mso = _("push includes obsolete changeset: %s!")
680 mso = _("push includes obsolete changeset: %s!")
681 mst = {"unstable": _("push includes unstable changeset: %s!"),
681 mst = {"unstable": _("push includes unstable changeset: %s!"),
682 "bumped": _("push includes bumped changeset: %s!"),
682 "bumped": _("push includes bumped changeset: %s!"),
683 "divergent": _("push includes divergent changeset: %s!")}
683 "divergent": _("push includes divergent changeset: %s!")}
684 # If we are to push if there is at least one
684 # If we are to push if there is at least one
685 # obsolete or unstable changeset in missing, at
685 # obsolete or unstable changeset in missing, at
686 # least one of the missinghead will be obsolete or
686 # least one of the missinghead will be obsolete or
687 # unstable. So checking heads only is ok
687 # unstable. So checking heads only is ok
688 for node in outgoing.missingheads:
688 for node in outgoing.missingheads:
689 ctx = unfi[node]
689 ctx = unfi[node]
690 if ctx.obsolete():
690 if ctx.obsolete():
691 raise error.Abort(mso % ctx)
691 raise error.Abort(mso % ctx)
692 elif ctx.troubled():
692 elif ctx.troubled():
693 raise error.Abort(mst[ctx.troubles()[0]] % ctx)
693 raise error.Abort(mst[ctx.troubles()[0]] % ctx)
694
694
695 discovery.checkheads(pushop)
695 discovery.checkheads(pushop)
696 return True
696 return True
697
697
698 # List of names of steps to perform for an outgoing bundle2, order matters.
698 # List of names of steps to perform for an outgoing bundle2, order matters.
699 b2partsgenorder = []
699 b2partsgenorder = []
700
700
701 # Mapping between step name and function
701 # Mapping between step name and function
702 #
702 #
703 # This exists to help extensions wrap steps if necessary
703 # This exists to help extensions wrap steps if necessary
704 b2partsgenmapping = {}
704 b2partsgenmapping = {}
705
705
706 def b2partsgenerator(stepname, idx=None):
706 def b2partsgenerator(stepname, idx=None):
707 """decorator for function generating bundle2 part
707 """decorator for function generating bundle2 part
708
708
709 The function is added to the step -> function mapping and appended to the
709 The function is added to the step -> function mapping and appended to the
710 list of steps. Beware that decorated functions will be added in order
710 list of steps. Beware that decorated functions will be added in order
711 (this may matter).
711 (this may matter).
712
712
713 You can only use this decorator for new steps, if you want to wrap a step
713 You can only use this decorator for new steps, if you want to wrap a step
714 from an extension, attack the b2partsgenmapping dictionary directly."""
714 from an extension, attack the b2partsgenmapping dictionary directly."""
715 def dec(func):
715 def dec(func):
716 assert stepname not in b2partsgenmapping
716 assert stepname not in b2partsgenmapping
717 b2partsgenmapping[stepname] = func
717 b2partsgenmapping[stepname] = func
718 if idx is None:
718 if idx is None:
719 b2partsgenorder.append(stepname)
719 b2partsgenorder.append(stepname)
720 else:
720 else:
721 b2partsgenorder.insert(idx, stepname)
721 b2partsgenorder.insert(idx, stepname)
722 return func
722 return func
723 return dec
723 return dec
724
724
725 def _pushb2ctxcheckheads(pushop, bundler):
725 def _pushb2ctxcheckheads(pushop, bundler):
726 """Generate race condition checking parts
726 """Generate race condition checking parts
727
727
728 Exists as an independent function to aid extensions
728 Exists as an independent function to aid extensions
729 """
729 """
730 if not pushop.force:
730 if not pushop.force:
731 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
731 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
732
732
733 @b2partsgenerator('changeset')
733 @b2partsgenerator('changeset')
734 def _pushb2ctx(pushop, bundler):
734 def _pushb2ctx(pushop, bundler):
735 """handle changegroup push through bundle2
735 """handle changegroup push through bundle2
736
736
737 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
737 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
738 """
738 """
739 if 'changesets' in pushop.stepsdone:
739 if 'changesets' in pushop.stepsdone:
740 return
740 return
741 pushop.stepsdone.add('changesets')
741 pushop.stepsdone.add('changesets')
742 # Send known heads to the server for race detection.
742 # Send known heads to the server for race detection.
743 if not _pushcheckoutgoing(pushop):
743 if not _pushcheckoutgoing(pushop):
744 return
744 return
745 pushop.repo.prepushoutgoinghooks(pushop)
745 pushop.repo.prepushoutgoinghooks(pushop)
746
746
747 _pushb2ctxcheckheads(pushop, bundler)
747 _pushb2ctxcheckheads(pushop, bundler)
748
748
749 b2caps = bundle2.bundle2caps(pushop.remote)
749 b2caps = bundle2.bundle2caps(pushop.remote)
750 version = '01'
750 version = '01'
751 cgversions = b2caps.get('changegroup')
751 cgversions = b2caps.get('changegroup')
752 if cgversions: # 3.1 and 3.2 ship with an empty value
752 if cgversions: # 3.1 and 3.2 ship with an empty value
753 cgversions = [v for v in cgversions
753 cgversions = [v for v in cgversions
754 if v in changegroup.supportedoutgoingversions(
754 if v in changegroup.supportedoutgoingversions(
755 pushop.repo)]
755 pushop.repo)]
756 if not cgversions:
756 if not cgversions:
757 raise ValueError(_('no common changegroup version'))
757 raise ValueError(_('no common changegroup version'))
758 version = max(cgversions)
758 version = max(cgversions)
759 cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
759 cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
760 pushop.outgoing,
760 pushop.outgoing,
761 version=version)
761 version=version)
762 cgpart = bundler.newpart('changegroup', data=cg)
762 cgpart = bundler.newpart('changegroup', data=cg)
763 if cgversions:
763 if cgversions:
764 cgpart.addparam('version', version)
764 cgpart.addparam('version', version)
765 if 'treemanifest' in pushop.repo.requirements:
765 if 'treemanifest' in pushop.repo.requirements:
766 cgpart.addparam('treemanifest', '1')
766 cgpart.addparam('treemanifest', '1')
767 def handlereply(op):
767 def handlereply(op):
768 """extract addchangegroup returns from server reply"""
768 """extract addchangegroup returns from server reply"""
769 cgreplies = op.records.getreplies(cgpart.id)
769 cgreplies = op.records.getreplies(cgpart.id)
770 assert len(cgreplies['changegroup']) == 1
770 assert len(cgreplies['changegroup']) == 1
771 pushop.cgresult = cgreplies['changegroup'][0]['return']
771 pushop.cgresult = cgreplies['changegroup'][0]['return']
772 return handlereply
772 return handlereply
773
773
774 @b2partsgenerator('phase')
774 @b2partsgenerator('phase')
775 def _pushb2phases(pushop, bundler):
775 def _pushb2phases(pushop, bundler):
776 """handle phase push through bundle2"""
776 """handle phase push through bundle2"""
777 if 'phases' in pushop.stepsdone:
777 if 'phases' in pushop.stepsdone:
778 return
778 return
779 b2caps = bundle2.bundle2caps(pushop.remote)
779 b2caps = bundle2.bundle2caps(pushop.remote)
780 if not 'pushkey' in b2caps:
780 if not 'pushkey' in b2caps:
781 return
781 return
782 pushop.stepsdone.add('phases')
782 pushop.stepsdone.add('phases')
783 part2node = []
783 part2node = []
784
784
785 def handlefailure(pushop, exc):
785 def handlefailure(pushop, exc):
786 targetid = int(exc.partid)
786 targetid = int(exc.partid)
787 for partid, node in part2node:
787 for partid, node in part2node:
788 if partid == targetid:
788 if partid == targetid:
789 raise error.Abort(_('updating %s to public failed') % node)
789 raise error.Abort(_('updating %s to public failed') % node)
790
790
791 enc = pushkey.encode
791 enc = pushkey.encode
792 for newremotehead in pushop.outdatedphases:
792 for newremotehead in pushop.outdatedphases:
793 part = bundler.newpart('pushkey')
793 part = bundler.newpart('pushkey')
794 part.addparam('namespace', enc('phases'))
794 part.addparam('namespace', enc('phases'))
795 part.addparam('key', enc(newremotehead.hex()))
795 part.addparam('key', enc(newremotehead.hex()))
796 part.addparam('old', enc(str(phases.draft)))
796 part.addparam('old', enc(str(phases.draft)))
797 part.addparam('new', enc(str(phases.public)))
797 part.addparam('new', enc(str(phases.public)))
798 part2node.append((part.id, newremotehead))
798 part2node.append((part.id, newremotehead))
799 pushop.pkfailcb[part.id] = handlefailure
799 pushop.pkfailcb[part.id] = handlefailure
800
800
801 def handlereply(op):
801 def handlereply(op):
802 for partid, node in part2node:
802 for partid, node in part2node:
803 partrep = op.records.getreplies(partid)
803 partrep = op.records.getreplies(partid)
804 results = partrep['pushkey']
804 results = partrep['pushkey']
805 assert len(results) <= 1
805 assert len(results) <= 1
806 msg = None
806 msg = None
807 if not results:
807 if not results:
808 msg = _('server ignored update of %s to public!\n') % node
808 msg = _('server ignored update of %s to public!\n') % node
809 elif not int(results[0]['return']):
809 elif not int(results[0]['return']):
810 msg = _('updating %s to public failed!\n') % node
810 msg = _('updating %s to public failed!\n') % node
811 if msg is not None:
811 if msg is not None:
812 pushop.ui.warn(msg)
812 pushop.ui.warn(msg)
813 return handlereply
813 return handlereply
814
814
815 @b2partsgenerator('obsmarkers')
815 @b2partsgenerator('obsmarkers')
816 def _pushb2obsmarkers(pushop, bundler):
816 def _pushb2obsmarkers(pushop, bundler):
817 if 'obsmarkers' in pushop.stepsdone:
817 if 'obsmarkers' in pushop.stepsdone:
818 return
818 return
819 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
819 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
820 if obsolete.commonversion(remoteversions) is None:
820 if obsolete.commonversion(remoteversions) is None:
821 return
821 return
822 pushop.stepsdone.add('obsmarkers')
822 pushop.stepsdone.add('obsmarkers')
823 if pushop.outobsmarkers:
823 if pushop.outobsmarkers:
824 markers = sorted(pushop.outobsmarkers)
824 markers = sorted(pushop.outobsmarkers)
825 buildobsmarkerspart(bundler, markers)
825 buildobsmarkerspart(bundler, markers)
826
826
827 @b2partsgenerator('bookmarks')
827 @b2partsgenerator('bookmarks')
828 def _pushb2bookmarks(pushop, bundler):
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Adds one 'pushkey' part per outgoing bookmark change and returns a reply
    handler that reports per-bookmark success/failure to the user.  Failure
    callbacks are registered in ``pushop.pkfailcb`` keyed by part id so that
    a server-side pushkey failure can be mapped back to the bookmark name.
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    # (part id, bookmark name, action) for every part we generate
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty 'old' means the bookmark is new on the remote; empty 'new'
        # means it is being deleted
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply
878
878
879
879
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # each registered part generator may return a callable that will be
    # invoked with the server reply once the bundle has been processed
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push (only the 'replycaps' part is present)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        # delegate failure reporting to the part-specific callback
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
928
928
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    unbundle = pushop.remote.capable('unbundle')
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
                                        bundlecaps=bundlecaps)

    # apply changegroup to remote
    if unbundle:
        # local repo finds heads on server, finds out what
        # revs it must push. once revs transferred, if server
        # finds it has different heads (someone else won
        # commit/push race), server aborts.
        if pushop.force:
            remoteheads = ['force']
        else:
            remoteheads = pushop.remoteheads
        # ssh: return remote's addchangegroup()
        # http: return remote's addchangegroup() or 0 for error
        pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                                 pushop.repo.url())
    else:
        # we return an integer indicating remote head count
        # change
        pushop.cgresult = pushop.remote.addchangegroup(cg, 'push',
                                                       pushop.repo.url())
972
975
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo', False)
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1028
1031
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)
1045
1048
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug('try to push obsolete markers to remote\n')
        rslts = []
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            # at least one pushkey call returned a falsy result
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
1064
1067
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # empty 'old' -> bookmark is new on remote; empty 'new' -> deletion
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value form invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
1086
1089
class pulloperation(object):
    """A object that represent a single pull operation

    It purpose is to carry pull related state and very common operation.

    A new should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # bundle2 is used unless something forces the bundle1 fallback
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1157
1160
class transactionmanager(object):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""
    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # lazily created transaction; None until transaction() is called
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
1187
1190
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop
1243
1246
1244 # list of steps to perform discovery before pull
1247 # list of steps to perform discovery before pull
1245 pulldiscoveryorder = []
1248 pulldiscoveryorder = []
1246
1249
1247 # Mapping between step name and function
1250 # Mapping between step name and function
1248 #
1251 #
1249 # This exists to help extensions wrap steps if necessary
1252 # This exists to help extensions wrap steps if necessary
1250 pulldiscoverymapping = {}
1253 pulldiscoverymapping = {}
1251
1254
1252 def pulldiscovery(stepname):
1255 def pulldiscovery(stepname):
1253 """decorator for function performing discovery before pull
1256 """decorator for function performing discovery before pull
1254
1257
1255 The function is added to the step -> function mapping and appended to the
1258 The function is added to the step -> function mapping and appended to the
1256 list of steps. Beware that decorated function will be added in order (this
1259 list of steps. Beware that decorated function will be added in order (this
1257 may matter).
1260 may matter).
1258
1261
1259 You can only use this decorator for a new step, if you want to wrap a step
1262 You can only use this decorator for a new step, if you want to wrap a step
1260 from an extension, change the pulldiscovery dictionary directly."""
1263 from an extension, change the pulldiscovery dictionary directly."""
1261 def dec(func):
1264 def dec(func):
1262 assert stepname not in pulldiscoverymapping
1265 assert stepname not in pulldiscoverymapping
1263 pulldiscoverymapping[stepname] = func
1266 pulldiscoverymapping[stepname] = func
1264 pulldiscoveryorder.append(stepname)
1267 pulldiscoveryorder.append(stepname)
1265 return func
1268 return func
1266 return dec
1269 return dec
1267
1270
1268 def _pulldiscovery(pullop):
1271 def _pulldiscovery(pullop):
1269 """Run all discovery steps"""
1272 """Run all discovery steps"""
1270 for stepname in pulldiscoveryorder:
1273 for stepname in pulldiscoveryorder:
1271 step = pulldiscoverymapping[stepname]
1274 step = pulldiscoverymapping[stepname]
1272 step(pullop)
1275 step(pullop)
1273
1276
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1287
1290
1288
1291
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote heads in filtered locally, lets drop it from the unknown
        # remote heads and put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                # known locally (possibly filtered) -> treat as common
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1326
1329
1327 def _pullbundle2(pullop):
1330 def _pullbundle2(pullop):
1328 """pull data using bundle2
1331 """pull data using bundle2
1329
1332
1330 For now, the only supported data are changegroup."""
1333 For now, the only supported data are changegroup."""
1331 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1334 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1332
1335
1333 # At the moment we don't do stream clones over bundle2. If that is
1336 # At the moment we don't do stream clones over bundle2. If that is
1334 # implemented then here's where the check for that will go.
1337 # implemented then here's where the check for that will go.
1335 streaming = False
1338 streaming = False
1336
1339
1337 # pulling changegroup
1340 # pulling changegroup
1338 pullop.stepsdone.add('changegroup')
1341 pullop.stepsdone.add('changegroup')
1339
1342
1340 kwargs['common'] = pullop.common
1343 kwargs['common'] = pullop.common
1341 kwargs['heads'] = pullop.heads or pullop.rheads
1344 kwargs['heads'] = pullop.heads or pullop.rheads
1342 kwargs['cg'] = pullop.fetch
1345 kwargs['cg'] = pullop.fetch
1343 if 'listkeys' in pullop.remotebundle2caps:
1346 if 'listkeys' in pullop.remotebundle2caps:
1344 kwargs['listkeys'] = ['phases']
1347 kwargs['listkeys'] = ['phases']
1345 if pullop.remotebookmarks is None:
1348 if pullop.remotebookmarks is None:
1346 # make sure to always includes bookmark data when migrating
1349 # make sure to always includes bookmark data when migrating
1347 # `hg incoming --bundle` to using this function.
1350 # `hg incoming --bundle` to using this function.
1348 kwargs['listkeys'].append('bookmarks')
1351 kwargs['listkeys'].append('bookmarks')
1349
1352
1350 # If this is a full pull / clone and the server supports the clone bundles
1353 # If this is a full pull / clone and the server supports the clone bundles
1351 # feature, tell the server whether we attempted a clone bundle. The
1354 # feature, tell the server whether we attempted a clone bundle. The
1352 # presence of this flag indicates the client supports clone bundles. This
1355 # presence of this flag indicates the client supports clone bundles. This
1353 # will enable the server to treat clients that support clone bundles
1356 # will enable the server to treat clients that support clone bundles
1354 # differently from those that don't.
1357 # differently from those that don't.
1355 if (pullop.remote.capable('clonebundles')
1358 if (pullop.remote.capable('clonebundles')
1356 and pullop.heads is None and list(pullop.common) == [nullid]):
1359 and pullop.heads is None and list(pullop.common) == [nullid]):
1357 kwargs['cbattempted'] = pullop.clonebundleattempted
1360 kwargs['cbattempted'] = pullop.clonebundleattempted
1358
1361
1359 if streaming:
1362 if streaming:
1360 pullop.repo.ui.status(_('streaming all changes\n'))
1363 pullop.repo.ui.status(_('streaming all changes\n'))
1361 elif not pullop.fetch:
1364 elif not pullop.fetch:
1362 pullop.repo.ui.status(_("no changes found\n"))
1365 pullop.repo.ui.status(_("no changes found\n"))
1363 pullop.cgresult = 0
1366 pullop.cgresult = 0
1364 else:
1367 else:
1365 if pullop.heads is None and list(pullop.common) == [nullid]:
1368 if pullop.heads is None and list(pullop.common) == [nullid]:
1366 pullop.repo.ui.status(_("requesting all changes\n"))
1369 pullop.repo.ui.status(_("requesting all changes\n"))
1367 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1370 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1368 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1371 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1369 if obsolete.commonversion(remoteversions) is not None:
1372 if obsolete.commonversion(remoteversions) is not None:
1370 kwargs['obsmarkers'] = True
1373 kwargs['obsmarkers'] = True
1371 pullop.stepsdone.add('obsmarkers')
1374 pullop.stepsdone.add('obsmarkers')
1372 _pullbundle2extraprepare(pullop, kwargs)
1375 _pullbundle2extraprepare(pullop, kwargs)
1373 bundle = pullop.remote.getbundle('pull', **kwargs)
1376 bundle = pullop.remote.getbundle('pull', **kwargs)
1374 try:
1377 try:
1375 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1378 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1376 except bundle2.AbortFromPart as exc:
1379 except bundle2.AbortFromPart as exc:
1377 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1380 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1378 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1381 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1379 except error.BundleValueError as exc:
1382 except error.BundleValueError as exc:
1380 raise error.Abort(_('missing support for %s') % exc)
1383 raise error.Abort(_('missing support for %s') % exc)
1381
1384
1382 if pullop.fetch:
1385 if pullop.fetch:
1383 results = [cg['return'] for cg in op.records['changegroup']]
1386 results = [cg['return'] for cg in op.records['changegroup']]
1384 pullop.cgresult = changegroup.combineresults(results)
1387 pullop.cgresult = changegroup.combineresults(results)
1385
1388
1386 # processing phases change
1389 # processing phases change
1387 for namespace, value in op.records['listkeys']:
1390 for namespace, value in op.records['listkeys']:
1388 if namespace == 'phases':
1391 if namespace == 'phases':
1389 _pullapplyphases(pullop, value)
1392 _pullapplyphases(pullop, value)
1390
1393
1391 # processing bookmark update
1394 # processing bookmark update
1392 for namespace, value in op.records['listkeys']:
1395 for namespace, value in op.records['listkeys']:
1393 if namespace == 'bookmarks':
1396 if namespace == 'bookmarks':
1394 pullop.remotebookmarks = value
1397 pullop.remotebookmarks = value
1395
1398
1396 # bookmark data were either already there or pulled in the bundle
1399 # bookmark data were either already there or pulled in the bundle
1397 if pullop.remotebookmarks is not None:
1400 if pullop.remotebookmarks is not None:
1398 _pullbookmarks(pullop)
1401 _pullbookmarks(pullop)
1399
1402
1400 def _pullbundle2extraprepare(pullop, kwargs):
1403 def _pullbundle2extraprepare(pullop, kwargs):
1401 """hook function so that extensions can extend the getbundle call"""
1404 """hook function so that extensions can extend the getbundle call"""
1402 pass
1405 pass
1403
1406
1404 def _pullchangeset(pullop):
1407 def _pullchangeset(pullop):
1405 """pull changeset from unbundle into the local repo"""
1408 """pull changeset from unbundle into the local repo"""
1406 # We delay the open of the transaction as late as possible so we
1409 # We delay the open of the transaction as late as possible so we
1407 # don't open transaction for nothing or you break future useful
1410 # don't open transaction for nothing or you break future useful
1408 # rollback call
1411 # rollback call
1409 if 'changegroup' in pullop.stepsdone:
1412 if 'changegroup' in pullop.stepsdone:
1410 return
1413 return
1411 pullop.stepsdone.add('changegroup')
1414 pullop.stepsdone.add('changegroup')
1412 if not pullop.fetch:
1415 if not pullop.fetch:
1413 pullop.repo.ui.status(_("no changes found\n"))
1416 pullop.repo.ui.status(_("no changes found\n"))
1414 pullop.cgresult = 0
1417 pullop.cgresult = 0
1415 return
1418 return
1416 pullop.gettransaction()
1419 pullop.gettransaction()
1417 if pullop.heads is None and list(pullop.common) == [nullid]:
1420 if pullop.heads is None and list(pullop.common) == [nullid]:
1418 pullop.repo.ui.status(_("requesting all changes\n"))
1421 pullop.repo.ui.status(_("requesting all changes\n"))
1419 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1422 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1420 # issue1320, avoid a race if remote changed after discovery
1423 # issue1320, avoid a race if remote changed after discovery
1421 pullop.heads = pullop.rheads
1424 pullop.heads = pullop.rheads
1422
1425
1423 if pullop.remote.capable('getbundle'):
1426 if pullop.remote.capable('getbundle'):
1424 # TODO: get bundlecaps from remote
1427 # TODO: get bundlecaps from remote
1425 cg = pullop.remote.getbundle('pull', common=pullop.common,
1428 cg = pullop.remote.getbundle('pull', common=pullop.common,
1426 heads=pullop.heads or pullop.rheads)
1429 heads=pullop.heads or pullop.rheads)
1427 elif pullop.heads is None:
1430 elif pullop.heads is None:
1428 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1431 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1429 elif not pullop.remote.capable('changegroupsubset'):
1432 elif not pullop.remote.capable('changegroupsubset'):
1430 raise error.Abort(_("partial pull cannot be done because "
1433 raise error.Abort(_("partial pull cannot be done because "
1431 "other repository doesn't support "
1434 "other repository doesn't support "
1432 "changegroupsubset."))
1435 "changegroupsubset."))
1433 else:
1436 else:
1434 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1437 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1435 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1438 pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url())
1436
1439
1437 def _pullphase(pullop):
1440 def _pullphase(pullop):
1438 # Get remote phases data from remote
1441 # Get remote phases data from remote
1439 if 'phases' in pullop.stepsdone:
1442 if 'phases' in pullop.stepsdone:
1440 return
1443 return
1441 remotephases = pullop.remote.listkeys('phases')
1444 remotephases = pullop.remote.listkeys('phases')
1442 _pullapplyphases(pullop, remotephases)
1445 _pullapplyphases(pullop, remotephases)
1443
1446
1444 def _pullapplyphases(pullop, remotephases):
1447 def _pullapplyphases(pullop, remotephases):
1445 """apply phase movement from observed remote state"""
1448 """apply phase movement from observed remote state"""
1446 if 'phases' in pullop.stepsdone:
1449 if 'phases' in pullop.stepsdone:
1447 return
1450 return
1448 pullop.stepsdone.add('phases')
1451 pullop.stepsdone.add('phases')
1449 publishing = bool(remotephases.get('publishing', False))
1452 publishing = bool(remotephases.get('publishing', False))
1450 if remotephases and not publishing:
1453 if remotephases and not publishing:
1451 # remote is new and non-publishing
1454 # remote is new and non-publishing
1452 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1455 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1453 pullop.pulledsubset,
1456 pullop.pulledsubset,
1454 remotephases)
1457 remotephases)
1455 dheads = pullop.pulledsubset
1458 dheads = pullop.pulledsubset
1456 else:
1459 else:
1457 # Remote is old or publishing all common changesets
1460 # Remote is old or publishing all common changesets
1458 # should be seen as public
1461 # should be seen as public
1459 pheads = pullop.pulledsubset
1462 pheads = pullop.pulledsubset
1460 dheads = []
1463 dheads = []
1461 unfi = pullop.repo.unfiltered()
1464 unfi = pullop.repo.unfiltered()
1462 phase = unfi._phasecache.phase
1465 phase = unfi._phasecache.phase
1463 rev = unfi.changelog.nodemap.get
1466 rev = unfi.changelog.nodemap.get
1464 public = phases.public
1467 public = phases.public
1465 draft = phases.draft
1468 draft = phases.draft
1466
1469
1467 # exclude changesets already public locally and update the others
1470 # exclude changesets already public locally and update the others
1468 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1471 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1469 if pheads:
1472 if pheads:
1470 tr = pullop.gettransaction()
1473 tr = pullop.gettransaction()
1471 phases.advanceboundary(pullop.repo, tr, public, pheads)
1474 phases.advanceboundary(pullop.repo, tr, public, pheads)
1472
1475
1473 # exclude changesets already draft locally and update the others
1476 # exclude changesets already draft locally and update the others
1474 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1477 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1475 if dheads:
1478 if dheads:
1476 tr = pullop.gettransaction()
1479 tr = pullop.gettransaction()
1477 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1480 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1478
1481
1479 def _pullbookmarks(pullop):
1482 def _pullbookmarks(pullop):
1480 """process the remote bookmark information to update the local one"""
1483 """process the remote bookmark information to update the local one"""
1481 if 'bookmarks' in pullop.stepsdone:
1484 if 'bookmarks' in pullop.stepsdone:
1482 return
1485 return
1483 pullop.stepsdone.add('bookmarks')
1486 pullop.stepsdone.add('bookmarks')
1484 repo = pullop.repo
1487 repo = pullop.repo
1485 remotebookmarks = pullop.remotebookmarks
1488 remotebookmarks = pullop.remotebookmarks
1486 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1489 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1487 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1490 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1488 pullop.remote.url(),
1491 pullop.remote.url(),
1489 pullop.gettransaction,
1492 pullop.gettransaction,
1490 explicit=pullop.explicitbookmarks)
1493 explicit=pullop.explicitbookmarks)
1491
1494
1492 def _pullobsolete(pullop):
1495 def _pullobsolete(pullop):
1493 """utility function to pull obsolete markers from a remote
1496 """utility function to pull obsolete markers from a remote
1494
1497
1495 The `gettransaction` is function that return the pull transaction, creating
1498 The `gettransaction` is function that return the pull transaction, creating
1496 one if necessary. We return the transaction to inform the calling code that
1499 one if necessary. We return the transaction to inform the calling code that
1497 a new transaction have been created (when applicable).
1500 a new transaction have been created (when applicable).
1498
1501
1499 Exists mostly to allow overriding for experimentation purpose"""
1502 Exists mostly to allow overriding for experimentation purpose"""
1500 if 'obsmarkers' in pullop.stepsdone:
1503 if 'obsmarkers' in pullop.stepsdone:
1501 return
1504 return
1502 pullop.stepsdone.add('obsmarkers')
1505 pullop.stepsdone.add('obsmarkers')
1503 tr = None
1506 tr = None
1504 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1507 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1505 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1508 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1506 remoteobs = pullop.remote.listkeys('obsolete')
1509 remoteobs = pullop.remote.listkeys('obsolete')
1507 if 'dump0' in remoteobs:
1510 if 'dump0' in remoteobs:
1508 tr = pullop.gettransaction()
1511 tr = pullop.gettransaction()
1509 markers = []
1512 markers = []
1510 for key in sorted(remoteobs, reverse=True):
1513 for key in sorted(remoteobs, reverse=True):
1511 if key.startswith('dump'):
1514 if key.startswith('dump'):
1512 data = util.b85decode(remoteobs[key])
1515 data = util.b85decode(remoteobs[key])
1513 version, newmarks = obsolete._readmarkers(data)
1516 version, newmarks = obsolete._readmarkers(data)
1514 markers += newmarks
1517 markers += newmarks
1515 if markers:
1518 if markers:
1516 pullop.repo.obsstore.add(tr, markers)
1519 pullop.repo.obsstore.add(tr, markers)
1517 pullop.repo.invalidatevolatilesets()
1520 pullop.repo.invalidatevolatilesets()
1518 return tr
1521 return tr
1519
1522
1520 def caps20to10(repo):
1523 def caps20to10(repo):
1521 """return a set with appropriate options to use bundle20 during getbundle"""
1524 """return a set with appropriate options to use bundle20 during getbundle"""
1522 caps = set(['HG20'])
1525 caps = set(['HG20'])
1523 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1526 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
1524 caps.add('bundle2=' + urlreq.quote(capsblob))
1527 caps.add('bundle2=' + urlreq.quote(capsblob))
1525 return caps
1528 return caps
1526
1529
1527 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1530 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1528 getbundle2partsorder = []
1531 getbundle2partsorder = []
1529
1532
1530 # Mapping between step name and function
1533 # Mapping between step name and function
1531 #
1534 #
1532 # This exists to help extensions wrap steps if necessary
1535 # This exists to help extensions wrap steps if necessary
1533 getbundle2partsmapping = {}
1536 getbundle2partsmapping = {}
1534
1537
1535 def getbundle2partsgenerator(stepname, idx=None):
1538 def getbundle2partsgenerator(stepname, idx=None):
1536 """decorator for function generating bundle2 part for getbundle
1539 """decorator for function generating bundle2 part for getbundle
1537
1540
1538 The function is added to the step -> function mapping and appended to the
1541 The function is added to the step -> function mapping and appended to the
1539 list of steps. Beware that decorated functions will be added in order
1542 list of steps. Beware that decorated functions will be added in order
1540 (this may matter).
1543 (this may matter).
1541
1544
1542 You can only use this decorator for new steps, if you want to wrap a step
1545 You can only use this decorator for new steps, if you want to wrap a step
1543 from an extension, attack the getbundle2partsmapping dictionary directly."""
1546 from an extension, attack the getbundle2partsmapping dictionary directly."""
1544 def dec(func):
1547 def dec(func):
1545 assert stepname not in getbundle2partsmapping
1548 assert stepname not in getbundle2partsmapping
1546 getbundle2partsmapping[stepname] = func
1549 getbundle2partsmapping[stepname] = func
1547 if idx is None:
1550 if idx is None:
1548 getbundle2partsorder.append(stepname)
1551 getbundle2partsorder.append(stepname)
1549 else:
1552 else:
1550 getbundle2partsorder.insert(idx, stepname)
1553 getbundle2partsorder.insert(idx, stepname)
1551 return func
1554 return func
1552 return dec
1555 return dec
1553
1556
1554 def bundle2requested(bundlecaps):
1557 def bundle2requested(bundlecaps):
1555 if bundlecaps is not None:
1558 if bundlecaps is not None:
1556 return any(cap.startswith('HG2') for cap in bundlecaps)
1559 return any(cap.startswith('HG2') for cap in bundlecaps)
1557 return False
1560 return False
1558
1561
1559 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1562 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1560 **kwargs):
1563 **kwargs):
1561 """Return chunks constituting a bundle's raw data.
1564 """Return chunks constituting a bundle's raw data.
1562
1565
1563 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1566 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1564 passed.
1567 passed.
1565
1568
1566 Returns an iterator over raw chunks (of varying sizes).
1569 Returns an iterator over raw chunks (of varying sizes).
1567 """
1570 """
1568 usebundle2 = bundle2requested(bundlecaps)
1571 usebundle2 = bundle2requested(bundlecaps)
1569 # bundle10 case
1572 # bundle10 case
1570 if not usebundle2:
1573 if not usebundle2:
1571 if bundlecaps and not kwargs.get('cg', True):
1574 if bundlecaps and not kwargs.get('cg', True):
1572 raise ValueError(_('request for bundle10 must include changegroup'))
1575 raise ValueError(_('request for bundle10 must include changegroup'))
1573
1576
1574 if kwargs:
1577 if kwargs:
1575 raise ValueError(_('unsupported getbundle arguments: %s')
1578 raise ValueError(_('unsupported getbundle arguments: %s')
1576 % ', '.join(sorted(kwargs.keys())))
1579 % ', '.join(sorted(kwargs.keys())))
1577 outgoing = _computeoutgoing(repo, heads, common)
1580 outgoing = _computeoutgoing(repo, heads, common)
1578 bundler = changegroup.getbundler('01', repo)
1581 bundler = changegroup.getbundler('01', repo, bundlecaps)
1579 return changegroup.getsubsetraw(repo, outgoing, bundler, source)
1582 return changegroup.getsubsetraw(repo, outgoing, bundler, source)
1580
1583
1581 # bundle20 case
1584 # bundle20 case
1582 b2caps = {}
1585 b2caps = {}
1583 for bcaps in bundlecaps:
1586 for bcaps in bundlecaps:
1584 if bcaps.startswith('bundle2='):
1587 if bcaps.startswith('bundle2='):
1585 blob = urlreq.unquote(bcaps[len('bundle2='):])
1588 blob = urlreq.unquote(bcaps[len('bundle2='):])
1586 b2caps.update(bundle2.decodecaps(blob))
1589 b2caps.update(bundle2.decodecaps(blob))
1587 bundler = bundle2.bundle20(repo.ui, b2caps)
1590 bundler = bundle2.bundle20(repo.ui, b2caps)
1588
1591
1589 kwargs['heads'] = heads
1592 kwargs['heads'] = heads
1590 kwargs['common'] = common
1593 kwargs['common'] = common
1591
1594
1592 for name in getbundle2partsorder:
1595 for name in getbundle2partsorder:
1593 func = getbundle2partsmapping[name]
1596 func = getbundle2partsmapping[name]
1594 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1597 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1595 **kwargs)
1598 **kwargs)
1596
1599
1597 return bundler.getchunks()
1600 return bundler.getchunks()
1598
1601
1599 @getbundle2partsgenerator('changegroup')
1602 @getbundle2partsgenerator('changegroup')
1600 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1603 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1601 b2caps=None, heads=None, common=None, **kwargs):
1604 b2caps=None, heads=None, common=None, **kwargs):
1602 """add a changegroup part to the requested bundle"""
1605 """add a changegroup part to the requested bundle"""
1603 cg = None
1606 cg = None
1604 if kwargs.get('cg', True):
1607 if kwargs.get('cg', True):
1605 # build changegroup bundle here.
1608 # build changegroup bundle here.
1606 version = '01'
1609 version = '01'
1607 cgversions = b2caps.get('changegroup')
1610 cgversions = b2caps.get('changegroup')
1608 if cgversions: # 3.1 and 3.2 ship with an empty value
1611 if cgversions: # 3.1 and 3.2 ship with an empty value
1609 cgversions = [v for v in cgversions
1612 cgversions = [v for v in cgversions
1610 if v in changegroup.supportedoutgoingversions(repo)]
1613 if v in changegroup.supportedoutgoingversions(repo)]
1611 if not cgversions:
1614 if not cgversions:
1612 raise ValueError(_('no common changegroup version'))
1615 raise ValueError(_('no common changegroup version'))
1613 version = max(cgversions)
1616 version = max(cgversions)
1614 outgoing = _computeoutgoing(repo, heads, common)
1617 outgoing = _computeoutgoing(repo, heads, common)
1615 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1618 cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
1619 bundlecaps=bundlecaps,
1616 version=version)
1620 version=version)
1617
1621
1618 if cg:
1622 if cg:
1619 part = bundler.newpart('changegroup', data=cg)
1623 part = bundler.newpart('changegroup', data=cg)
1620 if cgversions:
1624 if cgversions:
1621 part.addparam('version', version)
1625 part.addparam('version', version)
1622 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1626 part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
1623 if 'treemanifest' in repo.requirements:
1627 if 'treemanifest' in repo.requirements:
1624 part.addparam('treemanifest', '1')
1628 part.addparam('treemanifest', '1')
1625
1629
1626 @getbundle2partsgenerator('listkeys')
1630 @getbundle2partsgenerator('listkeys')
1627 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1631 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1628 b2caps=None, **kwargs):
1632 b2caps=None, **kwargs):
1629 """add parts containing listkeys namespaces to the requested bundle"""
1633 """add parts containing listkeys namespaces to the requested bundle"""
1630 listkeys = kwargs.get('listkeys', ())
1634 listkeys = kwargs.get('listkeys', ())
1631 for namespace in listkeys:
1635 for namespace in listkeys:
1632 part = bundler.newpart('listkeys')
1636 part = bundler.newpart('listkeys')
1633 part.addparam('namespace', namespace)
1637 part.addparam('namespace', namespace)
1634 keys = repo.listkeys(namespace).items()
1638 keys = repo.listkeys(namespace).items()
1635 part.data = pushkey.encodekeys(keys)
1639 part.data = pushkey.encodekeys(keys)
1636
1640
1637 @getbundle2partsgenerator('obsmarkers')
1641 @getbundle2partsgenerator('obsmarkers')
1638 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1642 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1639 b2caps=None, heads=None, **kwargs):
1643 b2caps=None, heads=None, **kwargs):
1640 """add an obsolescence markers part to the requested bundle"""
1644 """add an obsolescence markers part to the requested bundle"""
1641 if kwargs.get('obsmarkers', False):
1645 if kwargs.get('obsmarkers', False):
1642 if heads is None:
1646 if heads is None:
1643 heads = repo.heads()
1647 heads = repo.heads()
1644 subset = [c.node() for c in repo.set('::%ln', heads)]
1648 subset = [c.node() for c in repo.set('::%ln', heads)]
1645 markers = repo.obsstore.relevantmarkers(subset)
1649 markers = repo.obsstore.relevantmarkers(subset)
1646 markers = sorted(markers)
1650 markers = sorted(markers)
1647 buildobsmarkerspart(bundler, markers)
1651 buildobsmarkerspart(bundler, markers)
1648
1652
1649 @getbundle2partsgenerator('hgtagsfnodes')
1653 @getbundle2partsgenerator('hgtagsfnodes')
1650 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1654 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1651 b2caps=None, heads=None, common=None,
1655 b2caps=None, heads=None, common=None,
1652 **kwargs):
1656 **kwargs):
1653 """Transfer the .hgtags filenodes mapping.
1657 """Transfer the .hgtags filenodes mapping.
1654
1658
1655 Only values for heads in this bundle will be transferred.
1659 Only values for heads in this bundle will be transferred.
1656
1660
1657 The part data consists of pairs of 20 byte changeset node and .hgtags
1661 The part data consists of pairs of 20 byte changeset node and .hgtags
1658 filenodes raw values.
1662 filenodes raw values.
1659 """
1663 """
1660 # Don't send unless:
1664 # Don't send unless:
1661 # - changeset are being exchanged,
1665 # - changeset are being exchanged,
1662 # - the client supports it.
1666 # - the client supports it.
1663 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1667 if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps):
1664 return
1668 return
1665
1669
1666 outgoing = _computeoutgoing(repo, heads, common)
1670 outgoing = _computeoutgoing(repo, heads, common)
1667 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1671 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1668
1672
1669 def _getbookmarks(repo, **kwargs):
1673 def _getbookmarks(repo, **kwargs):
1670 """Returns bookmark to node mapping.
1674 """Returns bookmark to node mapping.
1671
1675
1672 This function is primarily used to generate `bookmarks` bundle2 part.
1676 This function is primarily used to generate `bookmarks` bundle2 part.
1673 It is a separate function in order to make it easy to wrap it
1677 It is a separate function in order to make it easy to wrap it
1674 in extensions. Passing `kwargs` to the function makes it easy to
1678 in extensions. Passing `kwargs` to the function makes it easy to
1675 add new parameters in extensions.
1679 add new parameters in extensions.
1676 """
1680 """
1677
1681
1678 return dict(bookmod.listbinbookmarks(repo))
1682 return dict(bookmod.listbinbookmarks(repo))
1679
1683
1680 def check_heads(repo, their_heads, context):
1684 def check_heads(repo, their_heads, context):
1681 """check if the heads of a repo have been modified
1685 """check if the heads of a repo have been modified
1682
1686
1683 Used by peer for unbundling.
1687 Used by peer for unbundling.
1684 """
1688 """
1685 heads = repo.heads()
1689 heads = repo.heads()
1686 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1690 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1687 if not (their_heads == ['force'] or their_heads == heads or
1691 if not (their_heads == ['force'] or their_heads == heads or
1688 their_heads == ['hashed', heads_hash]):
1692 their_heads == ['hashed', heads_hash]):
1689 # someone else committed/pushed/unbundled while we
1693 # someone else committed/pushed/unbundled while we
1690 # were transferring data
1694 # were transferring data
1691 raise error.PushRaced('repository changed while %s - '
1695 raise error.PushRaced('repository changed while %s - '
1692 'please try again' % context)
1696 'please try again' % context)
1693
1697
1694 def unbundle(repo, cg, heads, source, url):
1698 def unbundle(repo, cg, heads, source, url):
1695 """Apply a bundle to a repo.
1699 """Apply a bundle to a repo.
1696
1700
1697 this function makes sure the repo is locked during the application and have
1701 this function makes sure the repo is locked during the application and have
1698 mechanism to check that no push race occurred between the creation of the
1702 mechanism to check that no push race occurred between the creation of the
1699 bundle and its application.
1703 bundle and its application.
1700
1704
1701 If the push was raced as PushRaced exception is raised."""
1705 If the push was raced as PushRaced exception is raised."""
1702 r = 0
1706 r = 0
1703 # need a transaction when processing a bundle2 stream
1707 # need a transaction when processing a bundle2 stream
1704 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1708 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1705 lockandtr = [None, None, None]
1709 lockandtr = [None, None, None]
1706 recordout = None
1710 recordout = None
1707 # quick fix for output mismatch with bundle2 in 3.4
1711 # quick fix for output mismatch with bundle2 in 3.4
1708 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1712 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture',
1709 False)
1713 False)
1710 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1714 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1711 captureoutput = True
1715 captureoutput = True
1712 try:
1716 try:
1713 # note: outside bundle1, 'heads' is expected to be empty and this
1717 # note: outside bundle1, 'heads' is expected to be empty and this
1714 # 'check_heads' call wil be a no-op
1718 # 'check_heads' call wil be a no-op
1715 check_heads(repo, heads, 'uploading changes')
1719 check_heads(repo, heads, 'uploading changes')
1716 # push can proceed
1720 # push can proceed
1717 if not util.safehasattr(cg, 'params'):
1721 if not util.safehasattr(cg, 'params'):
1718 # legacy case: bundle1 (changegroup 01)
1722 # legacy case: bundle1 (changegroup 01)
1719 lockandtr[1] = repo.lock()
1723 lockandtr[1] = repo.lock()
1720 r = cg.apply(repo, source, url)
1724 r = cg.apply(repo, source, url)
1721 else:
1725 else:
1722 r = None
1726 r = None
1723 try:
1727 try:
1724 def gettransaction():
1728 def gettransaction():
1725 if not lockandtr[2]:
1729 if not lockandtr[2]:
1726 lockandtr[0] = repo.wlock()
1730 lockandtr[0] = repo.wlock()
1727 lockandtr[1] = repo.lock()
1731 lockandtr[1] = repo.lock()
1728 lockandtr[2] = repo.transaction(source)
1732 lockandtr[2] = repo.transaction(source)
1729 lockandtr[2].hookargs['source'] = source
1733 lockandtr[2].hookargs['source'] = source
1730 lockandtr[2].hookargs['url'] = url
1734 lockandtr[2].hookargs['url'] = url
1731 lockandtr[2].hookargs['bundle2'] = '1'
1735 lockandtr[2].hookargs['bundle2'] = '1'
1732 return lockandtr[2]
1736 return lockandtr[2]
1733
1737
1734 # Do greedy locking by default until we're satisfied with lazy
1738 # Do greedy locking by default until we're satisfied with lazy
1735 # locking.
1739 # locking.
1736 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1740 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1737 gettransaction()
1741 gettransaction()
1738
1742
1739 op = bundle2.bundleoperation(repo, gettransaction,
1743 op = bundle2.bundleoperation(repo, gettransaction,
1740 captureoutput=captureoutput)
1744 captureoutput=captureoutput)
1741 try:
1745 try:
1742 op = bundle2.processbundle(repo, cg, op=op)
1746 op = bundle2.processbundle(repo, cg, op=op)
1743 finally:
1747 finally:
1744 r = op.reply
1748 r = op.reply
1745 if captureoutput and r is not None:
1749 if captureoutput and r is not None:
1746 repo.ui.pushbuffer(error=True, subproc=True)
1750 repo.ui.pushbuffer(error=True, subproc=True)
1747 def recordout(output):
1751 def recordout(output):
1748 r.newpart('output', data=output, mandatory=False)
1752 r.newpart('output', data=output, mandatory=False)
1749 if lockandtr[2] is not None:
1753 if lockandtr[2] is not None:
1750 lockandtr[2].close()
1754 lockandtr[2].close()
1751 except BaseException as exc:
1755 except BaseException as exc:
1752 exc.duringunbundle2 = True
1756 exc.duringunbundle2 = True
1753 if captureoutput and r is not None:
1757 if captureoutput and r is not None:
1754 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1758 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1755 def recordout(output):
1759 def recordout(output):
1756 part = bundle2.bundlepart('output', data=output,
1760 part = bundle2.bundlepart('output', data=output,
1757 mandatory=False)
1761 mandatory=False)
1758 parts.append(part)
1762 parts.append(part)
1759 raise
1763 raise
1760 finally:
1764 finally:
1761 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1765 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1762 if recordout is not None:
1766 if recordout is not None:
1763 recordout(repo.ui.popbuffer())
1767 recordout(repo.ui.popbuffer())
1764 return r
1768 return r
1765
1769
1766 def _maybeapplyclonebundle(pullop):
1770 def _maybeapplyclonebundle(pullop):
1767 """Apply a clone bundle from a remote, if possible."""
1771 """Apply a clone bundle from a remote, if possible."""
1768
1772
1769 repo = pullop.repo
1773 repo = pullop.repo
1770 remote = pullop.remote
1774 remote = pullop.remote
1771
1775
1772 if not repo.ui.configbool('ui', 'clonebundles', True):
1776 if not repo.ui.configbool('ui', 'clonebundles', True):
1773 return
1777 return
1774
1778
1775 # Only run if local repo is empty.
1779 # Only run if local repo is empty.
1776 if len(repo):
1780 if len(repo):
1777 return
1781 return
1778
1782
1779 if pullop.heads:
1783 if pullop.heads:
1780 return
1784 return
1781
1785
1782 if not remote.capable('clonebundles'):
1786 if not remote.capable('clonebundles'):
1783 return
1787 return
1784
1788
1785 res = remote._call('clonebundles')
1789 res = remote._call('clonebundles')
1786
1790
1787 # If we call the wire protocol command, that's good enough to record the
1791 # If we call the wire protocol command, that's good enough to record the
1788 # attempt.
1792 # attempt.
1789 pullop.clonebundleattempted = True
1793 pullop.clonebundleattempted = True
1790
1794
1791 entries = parseclonebundlesmanifest(repo, res)
1795 entries = parseclonebundlesmanifest(repo, res)
1792 if not entries:
1796 if not entries:
1793 repo.ui.note(_('no clone bundles available on remote; '
1797 repo.ui.note(_('no clone bundles available on remote; '
1794 'falling back to regular clone\n'))
1798 'falling back to regular clone\n'))
1795 return
1799 return
1796
1800
1797 entries = filterclonebundleentries(repo, entries)
1801 entries = filterclonebundleentries(repo, entries)
1798 if not entries:
1802 if not entries:
1799 # There is a thundering herd concern here. However, if a server
1803 # There is a thundering herd concern here. However, if a server
1800 # operator doesn't advertise bundles appropriate for its clients,
1804 # operator doesn't advertise bundles appropriate for its clients,
1801 # they deserve what's coming. Furthermore, from a client's
1805 # they deserve what's coming. Furthermore, from a client's
1802 # perspective, no automatic fallback would mean not being able to
1806 # perspective, no automatic fallback would mean not being able to
1803 # clone!
1807 # clone!
1804 repo.ui.warn(_('no compatible clone bundles available on server; '
1808 repo.ui.warn(_('no compatible clone bundles available on server; '
1805 'falling back to regular clone\n'))
1809 'falling back to regular clone\n'))
1806 repo.ui.warn(_('(you may want to report this to the server '
1810 repo.ui.warn(_('(you may want to report this to the server '
1807 'operator)\n'))
1811 'operator)\n'))
1808 return
1812 return
1809
1813
1810 entries = sortclonebundleentries(repo.ui, entries)
1814 entries = sortclonebundleentries(repo.ui, entries)
1811
1815
1812 url = entries[0]['URL']
1816 url = entries[0]['URL']
1813 repo.ui.status(_('applying clone bundle from %s\n') % url)
1817 repo.ui.status(_('applying clone bundle from %s\n') % url)
1814 if trypullbundlefromurl(repo.ui, repo, url):
1818 if trypullbundlefromurl(repo.ui, repo, url):
1815 repo.ui.status(_('finished applying clone bundle\n'))
1819 repo.ui.status(_('finished applying clone bundle\n'))
1816 # Bundle failed.
1820 # Bundle failed.
1817 #
1821 #
1818 # We abort by default to avoid the thundering herd of
1822 # We abort by default to avoid the thundering herd of
1819 # clients flooding a server that was expecting expensive
1823 # clients flooding a server that was expecting expensive
1820 # clone load to be offloaded.
1824 # clone load to be offloaded.
1821 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1825 elif repo.ui.configbool('ui', 'clonebundlefallback', False):
1822 repo.ui.warn(_('falling back to normal clone\n'))
1826 repo.ui.warn(_('falling back to normal clone\n'))
1823 else:
1827 else:
1824 raise error.Abort(_('error applying bundle'),
1828 raise error.Abort(_('error applying bundle'),
1825 hint=_('if this error persists, consider contacting '
1829 hint=_('if this error persists, consider contacting '
1826 'the server operator or disable clone '
1830 'the server operator or disable clone '
1827 'bundles via '
1831 'bundles via '
1828 '"--config ui.clonebundles=false"'))
1832 '"--config ui.clonebundles=false"'))
1829
1833
1830 def parseclonebundlesmanifest(repo, s):
1834 def parseclonebundlesmanifest(repo, s):
1831 """Parses the raw text of a clone bundles manifest.
1835 """Parses the raw text of a clone bundles manifest.
1832
1836
1833 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1837 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1834 to the URL and other keys are the attributes for the entry.
1838 to the URL and other keys are the attributes for the entry.
1835 """
1839 """
1836 m = []
1840 m = []
1837 for line in s.splitlines():
1841 for line in s.splitlines():
1838 fields = line.split()
1842 fields = line.split()
1839 if not fields:
1843 if not fields:
1840 continue
1844 continue
1841 attrs = {'URL': fields[0]}
1845 attrs = {'URL': fields[0]}
1842 for rawattr in fields[1:]:
1846 for rawattr in fields[1:]:
1843 key, value = rawattr.split('=', 1)
1847 key, value = rawattr.split('=', 1)
1844 key = urlreq.unquote(key)
1848 key = urlreq.unquote(key)
1845 value = urlreq.unquote(value)
1849 value = urlreq.unquote(value)
1846 attrs[key] = value
1850 attrs[key] = value
1847
1851
1848 # Parse BUNDLESPEC into components. This makes client-side
1852 # Parse BUNDLESPEC into components. This makes client-side
1849 # preferences easier to specify since you can prefer a single
1853 # preferences easier to specify since you can prefer a single
1850 # component of the BUNDLESPEC.
1854 # component of the BUNDLESPEC.
1851 if key == 'BUNDLESPEC':
1855 if key == 'BUNDLESPEC':
1852 try:
1856 try:
1853 comp, version, params = parsebundlespec(repo, value,
1857 comp, version, params = parsebundlespec(repo, value,
1854 externalnames=True)
1858 externalnames=True)
1855 attrs['COMPRESSION'] = comp
1859 attrs['COMPRESSION'] = comp
1856 attrs['VERSION'] = version
1860 attrs['VERSION'] = version
1857 except error.InvalidBundleSpecification:
1861 except error.InvalidBundleSpecification:
1858 pass
1862 pass
1859 except error.UnsupportedBundleSpecification:
1863 except error.UnsupportedBundleSpecification:
1860 pass
1864 pass
1861
1865
1862 m.append(attrs)
1866 m.append(attrs)
1863
1867
1864 return m
1868 return m
1865
1869
1866 def filterclonebundleentries(repo, entries):
1870 def filterclonebundleentries(repo, entries):
1867 """Remove incompatible clone bundle manifest entries.
1871 """Remove incompatible clone bundle manifest entries.
1868
1872
1869 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1873 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1870 and returns a new list consisting of only the entries that this client
1874 and returns a new list consisting of only the entries that this client
1871 should be able to apply.
1875 should be able to apply.
1872
1876
1873 There is no guarantee we'll be able to apply all returned entries because
1877 There is no guarantee we'll be able to apply all returned entries because
1874 the metadata we use to filter on may be missing or wrong.
1878 the metadata we use to filter on may be missing or wrong.
1875 """
1879 """
1876 newentries = []
1880 newentries = []
1877 for entry in entries:
1881 for entry in entries:
1878 spec = entry.get('BUNDLESPEC')
1882 spec = entry.get('BUNDLESPEC')
1879 if spec:
1883 if spec:
1880 try:
1884 try:
1881 parsebundlespec(repo, spec, strict=True)
1885 parsebundlespec(repo, spec, strict=True)
1882 except error.InvalidBundleSpecification as e:
1886 except error.InvalidBundleSpecification as e:
1883 repo.ui.debug(str(e) + '\n')
1887 repo.ui.debug(str(e) + '\n')
1884 continue
1888 continue
1885 except error.UnsupportedBundleSpecification as e:
1889 except error.UnsupportedBundleSpecification as e:
1886 repo.ui.debug('filtering %s because unsupported bundle '
1890 repo.ui.debug('filtering %s because unsupported bundle '
1887 'spec: %s\n' % (entry['URL'], str(e)))
1891 'spec: %s\n' % (entry['URL'], str(e)))
1888 continue
1892 continue
1889
1893
1890 if 'REQUIRESNI' in entry and not sslutil.hassni:
1894 if 'REQUIRESNI' in entry and not sslutil.hassni:
1891 repo.ui.debug('filtering %s because SNI not supported\n' %
1895 repo.ui.debug('filtering %s because SNI not supported\n' %
1892 entry['URL'])
1896 entry['URL'])
1893 continue
1897 continue
1894
1898
1895 newentries.append(entry)
1899 newentries.append(entry)
1896
1900
1897 return newentries
1901 return newentries
1898
1902
1899 class clonebundleentry(object):
1903 class clonebundleentry(object):
1900 """Represents an item in a clone bundles manifest.
1904 """Represents an item in a clone bundles manifest.
1901
1905
1902 This rich class is needed to support sorting since sorted() in Python 3
1906 This rich class is needed to support sorting since sorted() in Python 3
1903 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1907 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1904 won't work.
1908 won't work.
1905 """
1909 """
1906
1910
1907 def __init__(self, value, prefers):
1911 def __init__(self, value, prefers):
1908 self.value = value
1912 self.value = value
1909 self.prefers = prefers
1913 self.prefers = prefers
1910
1914
1911 def _cmp(self, other):
1915 def _cmp(self, other):
1912 for prefkey, prefvalue in self.prefers:
1916 for prefkey, prefvalue in self.prefers:
1913 avalue = self.value.get(prefkey)
1917 avalue = self.value.get(prefkey)
1914 bvalue = other.value.get(prefkey)
1918 bvalue = other.value.get(prefkey)
1915
1919
1916 # Special case for b missing attribute and a matches exactly.
1920 # Special case for b missing attribute and a matches exactly.
1917 if avalue is not None and bvalue is None and avalue == prefvalue:
1921 if avalue is not None and bvalue is None and avalue == prefvalue:
1918 return -1
1922 return -1
1919
1923
1920 # Special case for a missing attribute and b matches exactly.
1924 # Special case for a missing attribute and b matches exactly.
1921 if bvalue is not None and avalue is None and bvalue == prefvalue:
1925 if bvalue is not None and avalue is None and bvalue == prefvalue:
1922 return 1
1926 return 1
1923
1927
1924 # We can't compare unless attribute present on both.
1928 # We can't compare unless attribute present on both.
1925 if avalue is None or bvalue is None:
1929 if avalue is None or bvalue is None:
1926 continue
1930 continue
1927
1931
1928 # Same values should fall back to next attribute.
1932 # Same values should fall back to next attribute.
1929 if avalue == bvalue:
1933 if avalue == bvalue:
1930 continue
1934 continue
1931
1935
1932 # Exact matches come first.
1936 # Exact matches come first.
1933 if avalue == prefvalue:
1937 if avalue == prefvalue:
1934 return -1
1938 return -1
1935 if bvalue == prefvalue:
1939 if bvalue == prefvalue:
1936 return 1
1940 return 1
1937
1941
1938 # Fall back to next attribute.
1942 # Fall back to next attribute.
1939 continue
1943 continue
1940
1944
1941 # If we got here we couldn't sort by attributes and prefers. Fall
1945 # If we got here we couldn't sort by attributes and prefers. Fall
1942 # back to index order.
1946 # back to index order.
1943 return 0
1947 return 0
1944
1948
1945 def __lt__(self, other):
1949 def __lt__(self, other):
1946 return self._cmp(other) < 0
1950 return self._cmp(other) < 0
1947
1951
1948 def __gt__(self, other):
1952 def __gt__(self, other):
1949 return self._cmp(other) > 0
1953 return self._cmp(other) > 0
1950
1954
1951 def __eq__(self, other):
1955 def __eq__(self, other):
1952 return self._cmp(other) == 0
1956 return self._cmp(other) == 0
1953
1957
1954 def __le__(self, other):
1958 def __le__(self, other):
1955 return self._cmp(other) <= 0
1959 return self._cmp(other) <= 0
1956
1960
1957 def __ge__(self, other):
1961 def __ge__(self, other):
1958 return self._cmp(other) >= 0
1962 return self._cmp(other) >= 0
1959
1963
1960 def __ne__(self, other):
1964 def __ne__(self, other):
1961 return self._cmp(other) != 0
1965 return self._cmp(other) != 0
1962
1966
1963 def sortclonebundleentries(ui, entries):
1967 def sortclonebundleentries(ui, entries):
1964 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1968 prefers = ui.configlist('ui', 'clonebundleprefers', default=[])
1965 if not prefers:
1969 if not prefers:
1966 return list(entries)
1970 return list(entries)
1967
1971
1968 prefers = [p.split('=', 1) for p in prefers]
1972 prefers = [p.split('=', 1) for p in prefers]
1969
1973
1970 items = sorted(clonebundleentry(v, prefers) for v in entries)
1974 items = sorted(clonebundleentry(v, prefers) for v in entries)
1971 return [i.value for i in items]
1975 return [i.value for i in items]
1972
1976
1973 def trypullbundlefromurl(ui, repo, url):
1977 def trypullbundlefromurl(ui, repo, url):
1974 """Attempt to apply a bundle from a URL."""
1978 """Attempt to apply a bundle from a URL."""
1975 lock = repo.lock()
1979 lock = repo.lock()
1976 try:
1980 try:
1977 tr = repo.transaction('bundleurl')
1981 tr = repo.transaction('bundleurl')
1978 try:
1982 try:
1979 try:
1983 try:
1980 fh = urlmod.open(ui, url)
1984 fh = urlmod.open(ui, url)
1981 cg = readbundle(ui, fh, 'stream')
1985 cg = readbundle(ui, fh, 'stream')
1982
1986
1983 if isinstance(cg, bundle2.unbundle20):
1987 if isinstance(cg, bundle2.unbundle20):
1984 bundle2.processbundle(repo, cg, lambda: tr)
1988 bundle2.processbundle(repo, cg, lambda: tr)
1985 elif isinstance(cg, streamclone.streamcloneapplier):
1989 elif isinstance(cg, streamclone.streamcloneapplier):
1986 cg.apply(repo)
1990 cg.apply(repo)
1987 else:
1991 else:
1988 cg.apply(repo, 'clonebundles', url)
1992 cg.apply(repo, 'clonebundles', url)
1989 tr.close()
1993 tr.close()
1990 return True
1994 return True
1991 except urlerr.httperror as e:
1995 except urlerr.httperror as e:
1992 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
1996 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
1993 except urlerr.urlerror as e:
1997 except urlerr.urlerror as e:
1994 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
1998 ui.warn(_('error fetching bundle: %s\n') % e.reason[1])
1995
1999
1996 return False
2000 return False
1997 finally:
2001 finally:
1998 tr.release()
2002 tr.release()
1999 finally:
2003 finally:
2000 lock.release()
2004 lock.release()
@@ -1,1232 +1,1232 b''
1 This test is dedicated to test the bundle2 container format
1 This test is dedicated to test the bundle2 container format
2
2
3 It test multiple existing parts to test different feature of the container. You
3 It test multiple existing parts to test different feature of the container. You
4 probably do not need to touch this test unless you change the binary encoding
4 probably do not need to touch this test unless you change the binary encoding
5 of the bundle2 format itself.
5 of the bundle2 format itself.
6
6
7 Create an extension to test bundle2 API
7 Create an extension to test bundle2 API
8
8
9 $ cat > bundle2.py << EOF
9 $ cat > bundle2.py << EOF
10 > """A small extension to test bundle2 implementation
10 > """A small extension to test bundle2 implementation
11 >
11 >
12 > This extension allows detailed testing of the various bundle2 API and
12 > This extension allows detailed testing of the various bundle2 API and
13 > behaviors.
13 > behaviors.
14 > """
14 > """
15 >
15 >
16 > import sys, os, gc
16 > import sys, os, gc
17 > from mercurial import cmdutil
17 > from mercurial import cmdutil
18 > from mercurial import util
18 > from mercurial import util
19 > from mercurial import bundle2
19 > from mercurial import bundle2
20 > from mercurial import scmutil
20 > from mercurial import scmutil
21 > from mercurial import discovery
21 > from mercurial import discovery
22 > from mercurial import changegroup
22 > from mercurial import changegroup
23 > from mercurial import error
23 > from mercurial import error
24 > from mercurial import obsolete
24 > from mercurial import obsolete
25 >
25 >
26 >
26 >
27 > try:
27 > try:
28 > import msvcrt
28 > import msvcrt
29 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
29 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
30 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
30 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
31 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
31 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
32 > except ImportError:
32 > except ImportError:
33 > pass
33 > pass
34 >
34 >
35 > cmdtable = {}
35 > cmdtable = {}
36 > command = cmdutil.command(cmdtable)
36 > command = cmdutil.command(cmdtable)
37 >
37 >
38 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
38 > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
39 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
39 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
40 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
40 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
41 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
41 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
42 >
42 >
43 > @bundle2.parthandler('test:song')
43 > @bundle2.parthandler('test:song')
44 > def songhandler(op, part):
44 > def songhandler(op, part):
45 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
45 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
46 > op.ui.write('The choir starts singing:\n')
46 > op.ui.write('The choir starts singing:\n')
47 > verses = 0
47 > verses = 0
48 > for line in part.read().split('\n'):
48 > for line in part.read().split('\n'):
49 > op.ui.write(' %s\n' % line)
49 > op.ui.write(' %s\n' % line)
50 > verses += 1
50 > verses += 1
51 > op.records.add('song', {'verses': verses})
51 > op.records.add('song', {'verses': verses})
52 >
52 >
53 > @bundle2.parthandler('test:ping')
53 > @bundle2.parthandler('test:ping')
54 > def pinghandler(op, part):
54 > def pinghandler(op, part):
55 > op.ui.write('received ping request (id %i)\n' % part.id)
55 > op.ui.write('received ping request (id %i)\n' % part.id)
56 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
56 > if op.reply is not None and 'ping-pong' in op.reply.capabilities:
57 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
57 > op.ui.write_err('replying to ping request (id %i)\n' % part.id)
58 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))],
58 > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))],
59 > mandatory=False)
59 > mandatory=False)
60 >
60 >
61 > @bundle2.parthandler('test:debugreply')
61 > @bundle2.parthandler('test:debugreply')
62 > def debugreply(op, part):
62 > def debugreply(op, part):
63 > """print data about the capacity of the bundle reply"""
63 > """print data about the capacity of the bundle reply"""
64 > if op.reply is None:
64 > if op.reply is None:
65 > op.ui.write('debugreply: no reply\n')
65 > op.ui.write('debugreply: no reply\n')
66 > else:
66 > else:
67 > op.ui.write('debugreply: capabilities:\n')
67 > op.ui.write('debugreply: capabilities:\n')
68 > for cap in sorted(op.reply.capabilities):
68 > for cap in sorted(op.reply.capabilities):
69 > op.ui.write('debugreply: %r\n' % cap)
69 > op.ui.write('debugreply: %r\n' % cap)
70 > for val in op.reply.capabilities[cap]:
70 > for val in op.reply.capabilities[cap]:
71 > op.ui.write('debugreply: %r\n' % val)
71 > op.ui.write('debugreply: %r\n' % val)
72 >
72 >
73 > @command('bundle2',
73 > @command('bundle2',
74 > [('', 'param', [], 'stream level parameter'),
74 > [('', 'param', [], 'stream level parameter'),
75 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
75 > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'),
76 > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
76 > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'),
77 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
77 > ('', 'parts', False, 'include some arbitrary parts to the bundle'),
78 > ('', 'reply', False, 'produce a reply bundle'),
78 > ('', 'reply', False, 'produce a reply bundle'),
79 > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
79 > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'),
80 > ('', 'genraise', False, 'includes a part that raise an exception during generation'),
80 > ('', 'genraise', False, 'includes a part that raise an exception during generation'),
81 > ('', 'timeout', False, 'emulate a timeout during bundle generation'),
81 > ('', 'timeout', False, 'emulate a timeout during bundle generation'),
82 > ('r', 'rev', [], 'includes those changeset in the bundle'),
82 > ('r', 'rev', [], 'includes those changeset in the bundle'),
83 > ('', 'compress', '', 'compress the stream'),],
83 > ('', 'compress', '', 'compress the stream'),],
84 > '[OUTPUTFILE]')
84 > '[OUTPUTFILE]')
85 > def cmdbundle2(ui, repo, path=None, **opts):
85 > def cmdbundle2(ui, repo, path=None, **opts):
86 > """write a bundle2 container on standard output"""
86 > """write a bundle2 container on standard output"""
87 > bundler = bundle2.bundle20(ui)
87 > bundler = bundle2.bundle20(ui)
88 > for p in opts['param']:
88 > for p in opts['param']:
89 > p = p.split('=', 1)
89 > p = p.split('=', 1)
90 > try:
90 > try:
91 > bundler.addparam(*p)
91 > bundler.addparam(*p)
92 > except ValueError, exc:
92 > except ValueError, exc:
93 > raise error.Abort('%s' % exc)
93 > raise error.Abort('%s' % exc)
94 >
94 >
95 > if opts['compress']:
95 > if opts['compress']:
96 > bundler.setcompression(opts['compress'])
96 > bundler.setcompression(opts['compress'])
97 >
97 >
98 > if opts['reply']:
98 > if opts['reply']:
99 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
99 > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
100 > bundler.newpart('replycaps', data=capsstring)
100 > bundler.newpart('replycaps', data=capsstring)
101 >
101 >
102 > if opts['pushrace']:
102 > if opts['pushrace']:
103 > # also serve to test the assignement of data outside of init
103 > # also serve to test the assignement of data outside of init
104 > part = bundler.newpart('check:heads')
104 > part = bundler.newpart('check:heads')
105 > part.data = '01234567890123456789'
105 > part.data = '01234567890123456789'
106 >
106 >
107 > revs = opts['rev']
107 > revs = opts['rev']
108 > if 'rev' in opts:
108 > if 'rev' in opts:
109 > revs = scmutil.revrange(repo, opts['rev'])
109 > revs = scmutil.revrange(repo, opts['rev'])
110 > if revs:
110 > if revs:
111 > # very crude version of a changegroup part creation
111 > # very crude version of a changegroup part creation
112 > bundled = repo.revs('%ld::%ld', revs, revs)
112 > bundled = repo.revs('%ld::%ld', revs, revs)
113 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
113 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
114 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
114 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
115 > outgoing = discovery.outgoing(repo, headcommon, headmissing)
115 > outgoing = discovery.outgoing(repo, headcommon, headmissing)
116 > cg = changegroup.getchangegroup(repo, 'test:bundle2', outgoing)
116 > cg = changegroup.getchangegroup(repo, 'test:bundle2', outgoing, None)
117 > bundler.newpart('changegroup', data=cg.getchunks(),
117 > bundler.newpart('changegroup', data=cg.getchunks(),
118 > mandatory=False)
118 > mandatory=False)
119 >
119 >
120 > if opts['parts']:
120 > if opts['parts']:
121 > bundler.newpart('test:empty', mandatory=False)
121 > bundler.newpart('test:empty', mandatory=False)
122 > # add a second one to make sure we handle multiple parts
122 > # add a second one to make sure we handle multiple parts
123 > bundler.newpart('test:empty', mandatory=False)
123 > bundler.newpart('test:empty', mandatory=False)
124 > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
124 > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
125 > bundler.newpart('test:debugreply', mandatory=False)
125 > bundler.newpart('test:debugreply', mandatory=False)
126 > mathpart = bundler.newpart('test:math')
126 > mathpart = bundler.newpart('test:math')
127 > mathpart.addparam('pi', '3.14')
127 > mathpart.addparam('pi', '3.14')
128 > mathpart.addparam('e', '2.72')
128 > mathpart.addparam('e', '2.72')
129 > mathpart.addparam('cooking', 'raw', mandatory=False)
129 > mathpart.addparam('cooking', 'raw', mandatory=False)
130 > mathpart.data = '42'
130 > mathpart.data = '42'
131 > mathpart.mandatory = False
131 > mathpart.mandatory = False
132 > # advisory known part with unknown mandatory param
132 > # advisory known part with unknown mandatory param
133 > bundler.newpart('test:song', [('randomparam','')], mandatory=False)
133 > bundler.newpart('test:song', [('randomparam','')], mandatory=False)
134 > if opts['unknown']:
134 > if opts['unknown']:
135 > bundler.newpart('test:unknown', data='some random content')
135 > bundler.newpart('test:unknown', data='some random content')
136 > if opts['unknownparams']:
136 > if opts['unknownparams']:
137 > bundler.newpart('test:song', [('randomparams', '')])
137 > bundler.newpart('test:song', [('randomparams', '')])
138 > if opts['parts']:
138 > if opts['parts']:
139 > bundler.newpart('test:ping', mandatory=False)
139 > bundler.newpart('test:ping', mandatory=False)
140 > if opts['genraise']:
140 > if opts['genraise']:
141 > def genraise():
141 > def genraise():
142 > yield 'first line\n'
142 > yield 'first line\n'
143 > raise RuntimeError('Someone set up us the bomb!')
143 > raise RuntimeError('Someone set up us the bomb!')
144 > bundler.newpart('output', data=genraise(), mandatory=False)
144 > bundler.newpart('output', data=genraise(), mandatory=False)
145 >
145 >
146 > if path is None:
146 > if path is None:
147 > file = sys.stdout
147 > file = sys.stdout
148 > else:
148 > else:
149 > file = open(path, 'wb')
149 > file = open(path, 'wb')
150 >
150 >
151 > if opts['timeout']:
151 > if opts['timeout']:
152 > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
152 > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False)
153 > for idx, junk in enumerate(bundler.getchunks()):
153 > for idx, junk in enumerate(bundler.getchunks()):
154 > ui.write('%d chunk\n' % idx)
154 > ui.write('%d chunk\n' % idx)
155 > if idx > 4:
155 > if idx > 4:
156 > # This throws a GeneratorExit inside the generator, which
156 > # This throws a GeneratorExit inside the generator, which
157 > # can cause problems if the exception-recovery code is
157 > # can cause problems if the exception-recovery code is
158 > # too zealous. It's important for this test that the break
158 > # too zealous. It's important for this test that the break
159 > # occur while we're in the middle of a part.
159 > # occur while we're in the middle of a part.
160 > break
160 > break
161 > gc.collect()
161 > gc.collect()
162 > ui.write('fake timeout complete.\n')
162 > ui.write('fake timeout complete.\n')
163 > return
163 > return
164 > try:
164 > try:
165 > for chunk in bundler.getchunks():
165 > for chunk in bundler.getchunks():
166 > file.write(chunk)
166 > file.write(chunk)
167 > except RuntimeError, exc:
167 > except RuntimeError, exc:
168 > raise error.Abort(exc)
168 > raise error.Abort(exc)
169 > finally:
169 > finally:
170 > file.flush()
170 > file.flush()
171 >
171 >
172 > @command('unbundle2', [], '')
172 > @command('unbundle2', [], '')
173 > def cmdunbundle2(ui, repo, replypath=None):
173 > def cmdunbundle2(ui, repo, replypath=None):
174 > """process a bundle2 stream from stdin on the current repo"""
174 > """process a bundle2 stream from stdin on the current repo"""
175 > try:
175 > try:
176 > tr = None
176 > tr = None
177 > lock = repo.lock()
177 > lock = repo.lock()
178 > tr = repo.transaction('processbundle')
178 > tr = repo.transaction('processbundle')
179 > try:
179 > try:
180 > unbundler = bundle2.getunbundler(ui, sys.stdin)
180 > unbundler = bundle2.getunbundler(ui, sys.stdin)
181 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
181 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
182 > tr.close()
182 > tr.close()
183 > except error.BundleValueError, exc:
183 > except error.BundleValueError, exc:
184 > raise error.Abort('missing support for %s' % exc)
184 > raise error.Abort('missing support for %s' % exc)
185 > except error.PushRaced, exc:
185 > except error.PushRaced, exc:
186 > raise error.Abort('push race: %s' % exc)
186 > raise error.Abort('push race: %s' % exc)
187 > finally:
187 > finally:
188 > if tr is not None:
188 > if tr is not None:
189 > tr.release()
189 > tr.release()
190 > lock.release()
190 > lock.release()
191 > remains = sys.stdin.read()
191 > remains = sys.stdin.read()
192 > ui.write('%i unread bytes\n' % len(remains))
192 > ui.write('%i unread bytes\n' % len(remains))
193 > if op.records['song']:
193 > if op.records['song']:
194 > totalverses = sum(r['verses'] for r in op.records['song'])
194 > totalverses = sum(r['verses'] for r in op.records['song'])
195 > ui.write('%i total verses sung\n' % totalverses)
195 > ui.write('%i total verses sung\n' % totalverses)
196 > for rec in op.records['changegroup']:
196 > for rec in op.records['changegroup']:
197 > ui.write('addchangegroup return: %i\n' % rec['return'])
197 > ui.write('addchangegroup return: %i\n' % rec['return'])
198 > if op.reply is not None and replypath is not None:
198 > if op.reply is not None and replypath is not None:
199 > with open(replypath, 'wb') as file:
199 > with open(replypath, 'wb') as file:
200 > for chunk in op.reply.getchunks():
200 > for chunk in op.reply.getchunks():
201 > file.write(chunk)
201 > file.write(chunk)
202 >
202 >
203 > @command('statbundle2', [], '')
203 > @command('statbundle2', [], '')
204 > def cmdstatbundle2(ui, repo):
204 > def cmdstatbundle2(ui, repo):
205 > """print statistic on the bundle2 container read from stdin"""
205 > """print statistic on the bundle2 container read from stdin"""
206 > unbundler = bundle2.getunbundler(ui, sys.stdin)
206 > unbundler = bundle2.getunbundler(ui, sys.stdin)
207 > try:
207 > try:
208 > params = unbundler.params
208 > params = unbundler.params
209 > except error.BundleValueError, exc:
209 > except error.BundleValueError, exc:
210 > raise error.Abort('unknown parameters: %s' % exc)
210 > raise error.Abort('unknown parameters: %s' % exc)
211 > ui.write('options count: %i\n' % len(params))
211 > ui.write('options count: %i\n' % len(params))
212 > for key in sorted(params):
212 > for key in sorted(params):
213 > ui.write('- %s\n' % key)
213 > ui.write('- %s\n' % key)
214 > value = params[key]
214 > value = params[key]
215 > if value is not None:
215 > if value is not None:
216 > ui.write(' %s\n' % value)
216 > ui.write(' %s\n' % value)
217 > count = 0
217 > count = 0
218 > for p in unbundler.iterparts():
218 > for p in unbundler.iterparts():
219 > count += 1
219 > count += 1
220 > ui.write(' :%s:\n' % p.type)
220 > ui.write(' :%s:\n' % p.type)
221 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
221 > ui.write(' mandatory: %i\n' % len(p.mandatoryparams))
222 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
222 > ui.write(' advisory: %i\n' % len(p.advisoryparams))
223 > ui.write(' payload: %i bytes\n' % len(p.read()))
223 > ui.write(' payload: %i bytes\n' % len(p.read()))
224 > ui.write('parts count: %i\n' % count)
224 > ui.write('parts count: %i\n' % count)
225 > EOF
225 > EOF
226 $ cat >> $HGRCPATH << EOF
226 $ cat >> $HGRCPATH << EOF
227 > [extensions]
227 > [extensions]
228 > bundle2=$TESTTMP/bundle2.py
228 > bundle2=$TESTTMP/bundle2.py
229 > [experimental]
229 > [experimental]
230 > evolution=createmarkers
230 > evolution=createmarkers
231 > [ui]
231 > [ui]
232 > ssh=python "$TESTDIR/dummyssh"
232 > ssh=python "$TESTDIR/dummyssh"
233 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
233 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
234 > [web]
234 > [web]
235 > push_ssl = false
235 > push_ssl = false
236 > allow_push = *
236 > allow_push = *
237 > [phases]
237 > [phases]
238 > publish=False
238 > publish=False
239 > EOF
239 > EOF
240
240
241 The extension requires a repo (currently unused)
241 The extension requires a repo (currently unused)
242
242
243 $ hg init main
243 $ hg init main
244 $ cd main
244 $ cd main
245 $ touch a
245 $ touch a
246 $ hg add a
246 $ hg add a
247 $ hg commit -m 'a'
247 $ hg commit -m 'a'
248
248
249
249
250 Empty bundle
250 Empty bundle
251 =================
251 =================
252
252
253 - no option
253 - no option
254 - no parts
254 - no parts
255
255
256 Test bundling
256 Test bundling
257
257
258 $ hg bundle2 | f --hexdump
258 $ hg bundle2 | f --hexdump
259
259
260 0000: 48 47 32 30 00 00 00 00 00 00 00 00 |HG20........|
260 0000: 48 47 32 30 00 00 00 00 00 00 00 00 |HG20........|
261
261
262 Test timeouts during bundling
262 Test timeouts during bundling
263 $ hg bundle2 --timeout --debug --config devel.bundle2.debug=yes
263 $ hg bundle2 --timeout --debug --config devel.bundle2.debug=yes
264 bundle2-output-bundle: "HG20", 1 parts total
264 bundle2-output-bundle: "HG20", 1 parts total
265 bundle2-output: start emission of HG20 stream
265 bundle2-output: start emission of HG20 stream
266 0 chunk
266 0 chunk
267 bundle2-output: bundle parameter:
267 bundle2-output: bundle parameter:
268 1 chunk
268 1 chunk
269 bundle2-output: start of parts
269 bundle2-output: start of parts
270 bundle2-output: bundle part: "test:song"
270 bundle2-output: bundle part: "test:song"
271 bundle2-output-part: "test:song" (advisory) 178 bytes payload
271 bundle2-output-part: "test:song" (advisory) 178 bytes payload
272 bundle2-output: part 0: "test:song"
272 bundle2-output: part 0: "test:song"
273 bundle2-output: header chunk size: 16
273 bundle2-output: header chunk size: 16
274 2 chunk
274 2 chunk
275 3 chunk
275 3 chunk
276 bundle2-output: payload chunk size: 178
276 bundle2-output: payload chunk size: 178
277 4 chunk
277 4 chunk
278 5 chunk
278 5 chunk
279 bundle2-generatorexit
279 bundle2-generatorexit
280 fake timeout complete.
280 fake timeout complete.
281
281
282 Test unbundling
282 Test unbundling
283
283
284 $ hg bundle2 | hg statbundle2
284 $ hg bundle2 | hg statbundle2
285 options count: 0
285 options count: 0
286 parts count: 0
286 parts count: 0
287
287
288 Test old style bundle are detected and refused
288 Test old style bundle are detected and refused
289
289
290 $ hg bundle --all --type v1 ../bundle.hg
290 $ hg bundle --all --type v1 ../bundle.hg
291 1 changesets found
291 1 changesets found
292 $ hg statbundle2 < ../bundle.hg
292 $ hg statbundle2 < ../bundle.hg
293 abort: unknown bundle version 10
293 abort: unknown bundle version 10
294 [255]
294 [255]
295
295
296 Test parameters
296 Test parameters
297 =================
297 =================
298
298
299 - some options
299 - some options
300 - no parts
300 - no parts
301
301
302 advisory parameters, no value
302 advisory parameters, no value
303 -------------------------------
303 -------------------------------
304
304
305 Simplest possible parameters form
305 Simplest possible parameters form
306
306
307 Test generation simple option
307 Test generation simple option
308
308
309 $ hg bundle2 --param 'caution' | f --hexdump
309 $ hg bundle2 --param 'caution' | f --hexdump
310
310
311 0000: 48 47 32 30 00 00 00 07 63 61 75 74 69 6f 6e 00 |HG20....caution.|
311 0000: 48 47 32 30 00 00 00 07 63 61 75 74 69 6f 6e 00 |HG20....caution.|
312 0010: 00 00 00 |...|
312 0010: 00 00 00 |...|
313
313
314 Test unbundling
314 Test unbundling
315
315
316 $ hg bundle2 --param 'caution' | hg statbundle2
316 $ hg bundle2 --param 'caution' | hg statbundle2
317 options count: 1
317 options count: 1
318 - caution
318 - caution
319 parts count: 0
319 parts count: 0
320
320
321 Test generation multiple option
321 Test generation multiple option
322
322
323 $ hg bundle2 --param 'caution' --param 'meal' | f --hexdump
323 $ hg bundle2 --param 'caution' --param 'meal' | f --hexdump
324
324
325 0000: 48 47 32 30 00 00 00 0c 63 61 75 74 69 6f 6e 20 |HG20....caution |
325 0000: 48 47 32 30 00 00 00 0c 63 61 75 74 69 6f 6e 20 |HG20....caution |
326 0010: 6d 65 61 6c 00 00 00 00 |meal....|
326 0010: 6d 65 61 6c 00 00 00 00 |meal....|
327
327
328 Test unbundling
328 Test unbundling
329
329
330 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
330 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
331 options count: 2
331 options count: 2
332 - caution
332 - caution
333 - meal
333 - meal
334 parts count: 0
334 parts count: 0
335
335
336 advisory parameters, with value
336 advisory parameters, with value
337 -------------------------------
337 -------------------------------
338
338
339 Test generation
339 Test generation
340
340
341 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | f --hexdump
341 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | f --hexdump
342
342
343 0000: 48 47 32 30 00 00 00 1c 63 61 75 74 69 6f 6e 20 |HG20....caution |
343 0000: 48 47 32 30 00 00 00 1c 63 61 75 74 69 6f 6e 20 |HG20....caution |
344 0010: 6d 65 61 6c 3d 76 65 67 61 6e 20 65 6c 65 70 68 |meal=vegan eleph|
344 0010: 6d 65 61 6c 3d 76 65 67 61 6e 20 65 6c 65 70 68 |meal=vegan eleph|
345 0020: 61 6e 74 73 00 00 00 00 |ants....|
345 0020: 61 6e 74 73 00 00 00 00 |ants....|
346
346
347 Test unbundling
347 Test unbundling
348
348
349 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
349 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
350 options count: 3
350 options count: 3
351 - caution
351 - caution
352 - elephants
352 - elephants
353 - meal
353 - meal
354 vegan
354 vegan
355 parts count: 0
355 parts count: 0
356
356
357 parameter with special char in value
357 parameter with special char in value
358 ---------------------------------------------------
358 ---------------------------------------------------
359
359
360 Test generation
360 Test generation
361
361
362 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | f --hexdump
362 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | f --hexdump
363
363
364 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
364 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
365 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
365 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
366 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
366 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
367 0030: 65 00 00 00 00 |e....|
367 0030: 65 00 00 00 00 |e....|
368
368
369 Test unbundling
369 Test unbundling
370
370
371 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
371 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
372 options count: 2
372 options count: 2
373 - e|! 7/
373 - e|! 7/
374 babar%#==tutu
374 babar%#==tutu
375 - simple
375 - simple
376 parts count: 0
376 parts count: 0
377
377
378 Test unknown mandatory option
378 Test unknown mandatory option
379 ---------------------------------------------------
379 ---------------------------------------------------
380
380
381 $ hg bundle2 --param 'Gravity' | hg statbundle2
381 $ hg bundle2 --param 'Gravity' | hg statbundle2
382 abort: unknown parameters: Stream Parameter - Gravity
382 abort: unknown parameters: Stream Parameter - Gravity
383 [255]
383 [255]
384
384
385 Test debug output
385 Test debug output
386 ---------------------------------------------------
386 ---------------------------------------------------
387
387
388 bundling debug
388 bundling debug
389
389
390 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2 --config progress.debug=true --config devel.bundle2.debug=true
390 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2 --config progress.debug=true --config devel.bundle2.debug=true
391 bundle2-output-bundle: "HG20", (2 params) 0 parts total
391 bundle2-output-bundle: "HG20", (2 params) 0 parts total
392 bundle2-output: start emission of HG20 stream
392 bundle2-output: start emission of HG20 stream
393 bundle2-output: bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
393 bundle2-output: bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
394 bundle2-output: start of parts
394 bundle2-output: start of parts
395 bundle2-output: end of bundle
395 bundle2-output: end of bundle
396
396
397 file content is ok
397 file content is ok
398
398
399 $ f --hexdump ../out.hg2
399 $ f --hexdump ../out.hg2
400 ../out.hg2:
400 ../out.hg2:
401 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
401 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
402 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
402 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
403 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
403 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
404 0030: 65 00 00 00 00 |e....|
404 0030: 65 00 00 00 00 |e....|
405
405
406 unbundling debug
406 unbundling debug
407
407
408 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../out.hg2
408 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../out.hg2
409 bundle2-input: start processing of HG20 stream
409 bundle2-input: start processing of HG20 stream
410 bundle2-input: reading bundle2 stream parameters
410 bundle2-input: reading bundle2 stream parameters
411 bundle2-input: ignoring unknown parameter 'e|! 7/'
411 bundle2-input: ignoring unknown parameter 'e|! 7/'
412 bundle2-input: ignoring unknown parameter 'simple'
412 bundle2-input: ignoring unknown parameter 'simple'
413 options count: 2
413 options count: 2
414 - e|! 7/
414 - e|! 7/
415 babar%#==tutu
415 babar%#==tutu
416 - simple
416 - simple
417 bundle2-input: start extraction of bundle2 parts
417 bundle2-input: start extraction of bundle2 parts
418 bundle2-input: part header size: 0
418 bundle2-input: part header size: 0
419 bundle2-input: end of bundle2 stream
419 bundle2-input: end of bundle2 stream
420 parts count: 0
420 parts count: 0
421
421
422
422
423 Test buggy input
423 Test buggy input
424 ---------------------------------------------------
424 ---------------------------------------------------
425
425
426 empty parameter name
426 empty parameter name
427
427
428 $ hg bundle2 --param '' --quiet
428 $ hg bundle2 --param '' --quiet
429 abort: empty parameter name
429 abort: empty parameter name
430 [255]
430 [255]
431
431
432 bad parameter name
432 bad parameter name
433
433
434 $ hg bundle2 --param 42babar
434 $ hg bundle2 --param 42babar
435 abort: non letter first character: '42babar'
435 abort: non letter first character: '42babar'
436 [255]
436 [255]
437
437
438
438
439 Test part
439 Test part
440 =================
440 =================
441
441
442 $ hg bundle2 --parts ../parts.hg2 --debug --config progress.debug=true --config devel.bundle2.debug=true
442 $ hg bundle2 --parts ../parts.hg2 --debug --config progress.debug=true --config devel.bundle2.debug=true
443 bundle2-output-bundle: "HG20", 7 parts total
443 bundle2-output-bundle: "HG20", 7 parts total
444 bundle2-output: start emission of HG20 stream
444 bundle2-output: start emission of HG20 stream
445 bundle2-output: bundle parameter:
445 bundle2-output: bundle parameter:
446 bundle2-output: start of parts
446 bundle2-output: start of parts
447 bundle2-output: bundle part: "test:empty"
447 bundle2-output: bundle part: "test:empty"
448 bundle2-output-part: "test:empty" (advisory) empty payload
448 bundle2-output-part: "test:empty" (advisory) empty payload
449 bundle2-output: part 0: "test:empty"
449 bundle2-output: part 0: "test:empty"
450 bundle2-output: header chunk size: 17
450 bundle2-output: header chunk size: 17
451 bundle2-output: closing payload chunk
451 bundle2-output: closing payload chunk
452 bundle2-output: bundle part: "test:empty"
452 bundle2-output: bundle part: "test:empty"
453 bundle2-output-part: "test:empty" (advisory) empty payload
453 bundle2-output-part: "test:empty" (advisory) empty payload
454 bundle2-output: part 1: "test:empty"
454 bundle2-output: part 1: "test:empty"
455 bundle2-output: header chunk size: 17
455 bundle2-output: header chunk size: 17
456 bundle2-output: closing payload chunk
456 bundle2-output: closing payload chunk
457 bundle2-output: bundle part: "test:song"
457 bundle2-output: bundle part: "test:song"
458 bundle2-output-part: "test:song" (advisory) 178 bytes payload
458 bundle2-output-part: "test:song" (advisory) 178 bytes payload
459 bundle2-output: part 2: "test:song"
459 bundle2-output: part 2: "test:song"
460 bundle2-output: header chunk size: 16
460 bundle2-output: header chunk size: 16
461 bundle2-output: payload chunk size: 178
461 bundle2-output: payload chunk size: 178
462 bundle2-output: closing payload chunk
462 bundle2-output: closing payload chunk
463 bundle2-output: bundle part: "test:debugreply"
463 bundle2-output: bundle part: "test:debugreply"
464 bundle2-output-part: "test:debugreply" (advisory) empty payload
464 bundle2-output-part: "test:debugreply" (advisory) empty payload
465 bundle2-output: part 3: "test:debugreply"
465 bundle2-output: part 3: "test:debugreply"
466 bundle2-output: header chunk size: 22
466 bundle2-output: header chunk size: 22
467 bundle2-output: closing payload chunk
467 bundle2-output: closing payload chunk
468 bundle2-output: bundle part: "test:math"
468 bundle2-output: bundle part: "test:math"
469 bundle2-output-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) 2 bytes payload
469 bundle2-output-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) 2 bytes payload
470 bundle2-output: part 4: "test:math"
470 bundle2-output: part 4: "test:math"
471 bundle2-output: header chunk size: 43
471 bundle2-output: header chunk size: 43
472 bundle2-output: payload chunk size: 2
472 bundle2-output: payload chunk size: 2
473 bundle2-output: closing payload chunk
473 bundle2-output: closing payload chunk
474 bundle2-output: bundle part: "test:song"
474 bundle2-output: bundle part: "test:song"
475 bundle2-output-part: "test:song" (advisory) (params: 1 mandatory) empty payload
475 bundle2-output-part: "test:song" (advisory) (params: 1 mandatory) empty payload
476 bundle2-output: part 5: "test:song"
476 bundle2-output: part 5: "test:song"
477 bundle2-output: header chunk size: 29
477 bundle2-output: header chunk size: 29
478 bundle2-output: closing payload chunk
478 bundle2-output: closing payload chunk
479 bundle2-output: bundle part: "test:ping"
479 bundle2-output: bundle part: "test:ping"
480 bundle2-output-part: "test:ping" (advisory) empty payload
480 bundle2-output-part: "test:ping" (advisory) empty payload
481 bundle2-output: part 6: "test:ping"
481 bundle2-output: part 6: "test:ping"
482 bundle2-output: header chunk size: 16
482 bundle2-output: header chunk size: 16
483 bundle2-output: closing payload chunk
483 bundle2-output: closing payload chunk
484 bundle2-output: end of bundle
484 bundle2-output: end of bundle
485
485
486 $ f --hexdump ../parts.hg2
486 $ f --hexdump ../parts.hg2
487 ../parts.hg2:
487 ../parts.hg2:
488 0000: 48 47 32 30 00 00 00 00 00 00 00 11 0a 74 65 73 |HG20.........tes|
488 0000: 48 47 32 30 00 00 00 00 00 00 00 11 0a 74 65 73 |HG20.........tes|
489 0010: 74 3a 65 6d 70 74 79 00 00 00 00 00 00 00 00 00 |t:empty.........|
489 0010: 74 3a 65 6d 70 74 79 00 00 00 00 00 00 00 00 00 |t:empty.........|
490 0020: 00 00 00 00 11 0a 74 65 73 74 3a 65 6d 70 74 79 |......test:empty|
490 0020: 00 00 00 00 11 0a 74 65 73 74 3a 65 6d 70 74 79 |......test:empty|
491 0030: 00 00 00 01 00 00 00 00 00 00 00 00 00 10 09 74 |...............t|
491 0030: 00 00 00 01 00 00 00 00 00 00 00 00 00 10 09 74 |...............t|
492 0040: 65 73 74 3a 73 6f 6e 67 00 00 00 02 00 00 00 00 |est:song........|
492 0040: 65 73 74 3a 73 6f 6e 67 00 00 00 02 00 00 00 00 |est:song........|
493 0050: 00 b2 50 61 74 61 6c 69 20 44 69 72 61 70 61 74 |..Patali Dirapat|
493 0050: 00 b2 50 61 74 61 6c 69 20 44 69 72 61 70 61 74 |..Patali Dirapat|
494 0060: 61 2c 20 43 72 6f 6d 64 61 20 43 72 6f 6d 64 61 |a, Cromda Cromda|
494 0060: 61 2c 20 43 72 6f 6d 64 61 20 43 72 6f 6d 64 61 |a, Cromda Cromda|
495 0070: 20 52 69 70 61 6c 6f 2c 20 50 61 74 61 20 50 61 | Ripalo, Pata Pa|
495 0070: 20 52 69 70 61 6c 6f 2c 20 50 61 74 61 20 50 61 | Ripalo, Pata Pa|
496 0080: 74 61 2c 20 4b 6f 20 4b 6f 20 4b 6f 0a 42 6f 6b |ta, Ko Ko Ko.Bok|
496 0080: 74 61 2c 20 4b 6f 20 4b 6f 20 4b 6f 0a 42 6f 6b |ta, Ko Ko Ko.Bok|
497 0090: 6f 72 6f 20 44 69 70 6f 75 6c 69 74 6f 2c 20 52 |oro Dipoulito, R|
497 0090: 6f 72 6f 20 44 69 70 6f 75 6c 69 74 6f 2c 20 52 |oro Dipoulito, R|
498 00a0: 6f 6e 64 69 20 52 6f 6e 64 69 20 50 65 70 69 6e |ondi Rondi Pepin|
498 00a0: 6f 6e 64 69 20 52 6f 6e 64 69 20 50 65 70 69 6e |ondi Rondi Pepin|
499 00b0: 6f 2c 20 50 61 74 61 20 50 61 74 61 2c 20 4b 6f |o, Pata Pata, Ko|
499 00b0: 6f 2c 20 50 61 74 61 20 50 61 74 61 2c 20 4b 6f |o, Pata Pata, Ko|
500 00c0: 20 4b 6f 20 4b 6f 0a 45 6d 61 6e 61 20 4b 61 72 | Ko Ko.Emana Kar|
500 00c0: 20 4b 6f 20 4b 6f 0a 45 6d 61 6e 61 20 4b 61 72 | Ko Ko.Emana Kar|
501 00d0: 61 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c |assoli, Loucra L|
501 00d0: 61 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c |assoli, Loucra L|
502 00e0: 6f 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 |oucra Ponponto, |
502 00e0: 6f 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 |oucra Ponponto, |
503 00f0: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
503 00f0: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
504 0100: 20 4b 6f 2e 00 00 00 00 00 00 00 16 0f 74 65 73 | Ko..........tes|
504 0100: 20 4b 6f 2e 00 00 00 00 00 00 00 16 0f 74 65 73 | Ko..........tes|
505 0110: 74 3a 64 65 62 75 67 72 65 70 6c 79 00 00 00 03 |t:debugreply....|
505 0110: 74 3a 64 65 62 75 67 72 65 70 6c 79 00 00 00 03 |t:debugreply....|
506 0120: 00 00 00 00 00 00 00 00 00 2b 09 74 65 73 74 3a |.........+.test:|
506 0120: 00 00 00 00 00 00 00 00 00 2b 09 74 65 73 74 3a |.........+.test:|
507 0130: 6d 61 74 68 00 00 00 04 02 01 02 04 01 04 07 03 |math............|
507 0130: 6d 61 74 68 00 00 00 04 02 01 02 04 01 04 07 03 |math............|
508 0140: 70 69 33 2e 31 34 65 32 2e 37 32 63 6f 6f 6b 69 |pi3.14e2.72cooki|
508 0140: 70 69 33 2e 31 34 65 32 2e 37 32 63 6f 6f 6b 69 |pi3.14e2.72cooki|
509 0150: 6e 67 72 61 77 00 00 00 02 34 32 00 00 00 00 00 |ngraw....42.....|
509 0150: 6e 67 72 61 77 00 00 00 02 34 32 00 00 00 00 00 |ngraw....42.....|
510 0160: 00 00 1d 09 74 65 73 74 3a 73 6f 6e 67 00 00 00 |....test:song...|
510 0160: 00 00 1d 09 74 65 73 74 3a 73 6f 6e 67 00 00 00 |....test:song...|
511 0170: 05 01 00 0b 00 72 61 6e 64 6f 6d 70 61 72 61 6d |.....randomparam|
511 0170: 05 01 00 0b 00 72 61 6e 64 6f 6d 70 61 72 61 6d |.....randomparam|
512 0180: 00 00 00 00 00 00 00 10 09 74 65 73 74 3a 70 69 |.........test:pi|
512 0180: 00 00 00 00 00 00 00 10 09 74 65 73 74 3a 70 69 |.........test:pi|
513 0190: 6e 67 00 00 00 06 00 00 00 00 00 00 00 00 00 00 |ng..............|
513 0190: 6e 67 00 00 00 06 00 00 00 00 00 00 00 00 00 00 |ng..............|
514
514
515
515
516 $ hg statbundle2 < ../parts.hg2
516 $ hg statbundle2 < ../parts.hg2
517 options count: 0
517 options count: 0
518 :test:empty:
518 :test:empty:
519 mandatory: 0
519 mandatory: 0
520 advisory: 0
520 advisory: 0
521 payload: 0 bytes
521 payload: 0 bytes
522 :test:empty:
522 :test:empty:
523 mandatory: 0
523 mandatory: 0
524 advisory: 0
524 advisory: 0
525 payload: 0 bytes
525 payload: 0 bytes
526 :test:song:
526 :test:song:
527 mandatory: 0
527 mandatory: 0
528 advisory: 0
528 advisory: 0
529 payload: 178 bytes
529 payload: 178 bytes
530 :test:debugreply:
530 :test:debugreply:
531 mandatory: 0
531 mandatory: 0
532 advisory: 0
532 advisory: 0
533 payload: 0 bytes
533 payload: 0 bytes
534 :test:math:
534 :test:math:
535 mandatory: 2
535 mandatory: 2
536 advisory: 1
536 advisory: 1
537 payload: 2 bytes
537 payload: 2 bytes
538 :test:song:
538 :test:song:
539 mandatory: 1
539 mandatory: 1
540 advisory: 0
540 advisory: 0
541 payload: 0 bytes
541 payload: 0 bytes
542 :test:ping:
542 :test:ping:
543 mandatory: 0
543 mandatory: 0
544 advisory: 0
544 advisory: 0
545 payload: 0 bytes
545 payload: 0 bytes
546 parts count: 7
546 parts count: 7
547
547
548 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
548 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
549 bundle2-input: start processing of HG20 stream
549 bundle2-input: start processing of HG20 stream
550 bundle2-input: reading bundle2 stream parameters
550 bundle2-input: reading bundle2 stream parameters
551 options count: 0
551 options count: 0
552 bundle2-input: start extraction of bundle2 parts
552 bundle2-input: start extraction of bundle2 parts
553 bundle2-input: part header size: 17
553 bundle2-input: part header size: 17
554 bundle2-input: part type: "test:empty"
554 bundle2-input: part type: "test:empty"
555 bundle2-input: part id: "0"
555 bundle2-input: part id: "0"
556 bundle2-input: part parameters: 0
556 bundle2-input: part parameters: 0
557 :test:empty:
557 :test:empty:
558 mandatory: 0
558 mandatory: 0
559 advisory: 0
559 advisory: 0
560 bundle2-input: payload chunk size: 0
560 bundle2-input: payload chunk size: 0
561 payload: 0 bytes
561 payload: 0 bytes
562 bundle2-input: part header size: 17
562 bundle2-input: part header size: 17
563 bundle2-input: part type: "test:empty"
563 bundle2-input: part type: "test:empty"
564 bundle2-input: part id: "1"
564 bundle2-input: part id: "1"
565 bundle2-input: part parameters: 0
565 bundle2-input: part parameters: 0
566 :test:empty:
566 :test:empty:
567 mandatory: 0
567 mandatory: 0
568 advisory: 0
568 advisory: 0
569 bundle2-input: payload chunk size: 0
569 bundle2-input: payload chunk size: 0
570 payload: 0 bytes
570 payload: 0 bytes
571 bundle2-input: part header size: 16
571 bundle2-input: part header size: 16
572 bundle2-input: part type: "test:song"
572 bundle2-input: part type: "test:song"
573 bundle2-input: part id: "2"
573 bundle2-input: part id: "2"
574 bundle2-input: part parameters: 0
574 bundle2-input: part parameters: 0
575 :test:song:
575 :test:song:
576 mandatory: 0
576 mandatory: 0
577 advisory: 0
577 advisory: 0
578 bundle2-input: payload chunk size: 178
578 bundle2-input: payload chunk size: 178
579 bundle2-input: payload chunk size: 0
579 bundle2-input: payload chunk size: 0
580 bundle2-input-part: total payload size 178
580 bundle2-input-part: total payload size 178
581 payload: 178 bytes
581 payload: 178 bytes
582 bundle2-input: part header size: 22
582 bundle2-input: part header size: 22
583 bundle2-input: part type: "test:debugreply"
583 bundle2-input: part type: "test:debugreply"
584 bundle2-input: part id: "3"
584 bundle2-input: part id: "3"
585 bundle2-input: part parameters: 0
585 bundle2-input: part parameters: 0
586 :test:debugreply:
586 :test:debugreply:
587 mandatory: 0
587 mandatory: 0
588 advisory: 0
588 advisory: 0
589 bundle2-input: payload chunk size: 0
589 bundle2-input: payload chunk size: 0
590 payload: 0 bytes
590 payload: 0 bytes
591 bundle2-input: part header size: 43
591 bundle2-input: part header size: 43
592 bundle2-input: part type: "test:math"
592 bundle2-input: part type: "test:math"
593 bundle2-input: part id: "4"
593 bundle2-input: part id: "4"
594 bundle2-input: part parameters: 3
594 bundle2-input: part parameters: 3
595 :test:math:
595 :test:math:
596 mandatory: 2
596 mandatory: 2
597 advisory: 1
597 advisory: 1
598 bundle2-input: payload chunk size: 2
598 bundle2-input: payload chunk size: 2
599 bundle2-input: payload chunk size: 0
599 bundle2-input: payload chunk size: 0
600 bundle2-input-part: total payload size 2
600 bundle2-input-part: total payload size 2
601 payload: 2 bytes
601 payload: 2 bytes
602 bundle2-input: part header size: 29
602 bundle2-input: part header size: 29
603 bundle2-input: part type: "test:song"
603 bundle2-input: part type: "test:song"
604 bundle2-input: part id: "5"
604 bundle2-input: part id: "5"
605 bundle2-input: part parameters: 1
605 bundle2-input: part parameters: 1
606 :test:song:
606 :test:song:
607 mandatory: 1
607 mandatory: 1
608 advisory: 0
608 advisory: 0
609 bundle2-input: payload chunk size: 0
609 bundle2-input: payload chunk size: 0
610 payload: 0 bytes
610 payload: 0 bytes
611 bundle2-input: part header size: 16
611 bundle2-input: part header size: 16
612 bundle2-input: part type: "test:ping"
612 bundle2-input: part type: "test:ping"
613 bundle2-input: part id: "6"
613 bundle2-input: part id: "6"
614 bundle2-input: part parameters: 0
614 bundle2-input: part parameters: 0
615 :test:ping:
615 :test:ping:
616 mandatory: 0
616 mandatory: 0
617 advisory: 0
617 advisory: 0
618 bundle2-input: payload chunk size: 0
618 bundle2-input: payload chunk size: 0
619 payload: 0 bytes
619 payload: 0 bytes
620 bundle2-input: part header size: 0
620 bundle2-input: part header size: 0
621 bundle2-input: end of bundle2 stream
621 bundle2-input: end of bundle2 stream
622 parts count: 7
622 parts count: 7
623
623
624 Test actual unbundling of test part
624 Test actual unbundling of test part
625 =======================================
625 =======================================
626
626
627 Process the bundle
627 Process the bundle
628
628
629 $ hg unbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
629 $ hg unbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
630 bundle2-input: start processing of HG20 stream
630 bundle2-input: start processing of HG20 stream
631 bundle2-input: reading bundle2 stream parameters
631 bundle2-input: reading bundle2 stream parameters
632 bundle2-input-bundle: with-transaction
632 bundle2-input-bundle: with-transaction
633 bundle2-input: start extraction of bundle2 parts
633 bundle2-input: start extraction of bundle2 parts
634 bundle2-input: part header size: 17
634 bundle2-input: part header size: 17
635 bundle2-input: part type: "test:empty"
635 bundle2-input: part type: "test:empty"
636 bundle2-input: part id: "0"
636 bundle2-input: part id: "0"
637 bundle2-input: part parameters: 0
637 bundle2-input: part parameters: 0
638 bundle2-input: ignoring unsupported advisory part test:empty
638 bundle2-input: ignoring unsupported advisory part test:empty
639 bundle2-input-part: "test:empty" (advisory) unsupported-type
639 bundle2-input-part: "test:empty" (advisory) unsupported-type
640 bundle2-input: payload chunk size: 0
640 bundle2-input: payload chunk size: 0
641 bundle2-input: part header size: 17
641 bundle2-input: part header size: 17
642 bundle2-input: part type: "test:empty"
642 bundle2-input: part type: "test:empty"
643 bundle2-input: part id: "1"
643 bundle2-input: part id: "1"
644 bundle2-input: part parameters: 0
644 bundle2-input: part parameters: 0
645 bundle2-input: ignoring unsupported advisory part test:empty
645 bundle2-input: ignoring unsupported advisory part test:empty
646 bundle2-input-part: "test:empty" (advisory) unsupported-type
646 bundle2-input-part: "test:empty" (advisory) unsupported-type
647 bundle2-input: payload chunk size: 0
647 bundle2-input: payload chunk size: 0
648 bundle2-input: part header size: 16
648 bundle2-input: part header size: 16
649 bundle2-input: part type: "test:song"
649 bundle2-input: part type: "test:song"
650 bundle2-input: part id: "2"
650 bundle2-input: part id: "2"
651 bundle2-input: part parameters: 0
651 bundle2-input: part parameters: 0
652 bundle2-input: found a handler for part 'test:song'
652 bundle2-input: found a handler for part 'test:song'
653 bundle2-input-part: "test:song" (advisory) supported
653 bundle2-input-part: "test:song" (advisory) supported
654 The choir starts singing:
654 The choir starts singing:
655 bundle2-input: payload chunk size: 178
655 bundle2-input: payload chunk size: 178
656 bundle2-input: payload chunk size: 0
656 bundle2-input: payload chunk size: 0
657 bundle2-input-part: total payload size 178
657 bundle2-input-part: total payload size 178
658 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
658 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
659 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
659 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
660 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
660 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
661 bundle2-input: part header size: 22
661 bundle2-input: part header size: 22
662 bundle2-input: part type: "test:debugreply"
662 bundle2-input: part type: "test:debugreply"
663 bundle2-input: part id: "3"
663 bundle2-input: part id: "3"
664 bundle2-input: part parameters: 0
664 bundle2-input: part parameters: 0
665 bundle2-input: found a handler for part 'test:debugreply'
665 bundle2-input: found a handler for part 'test:debugreply'
666 bundle2-input-part: "test:debugreply" (advisory) supported
666 bundle2-input-part: "test:debugreply" (advisory) supported
667 debugreply: no reply
667 debugreply: no reply
668 bundle2-input: payload chunk size: 0
668 bundle2-input: payload chunk size: 0
669 bundle2-input: part header size: 43
669 bundle2-input: part header size: 43
670 bundle2-input: part type: "test:math"
670 bundle2-input: part type: "test:math"
671 bundle2-input: part id: "4"
671 bundle2-input: part id: "4"
672 bundle2-input: part parameters: 3
672 bundle2-input: part parameters: 3
673 bundle2-input: ignoring unsupported advisory part test:math
673 bundle2-input: ignoring unsupported advisory part test:math
674 bundle2-input-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) unsupported-type
674 bundle2-input-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) unsupported-type
675 bundle2-input: payload chunk size: 2
675 bundle2-input: payload chunk size: 2
676 bundle2-input: payload chunk size: 0
676 bundle2-input: payload chunk size: 0
677 bundle2-input-part: total payload size 2
677 bundle2-input-part: total payload size 2
678 bundle2-input: part header size: 29
678 bundle2-input: part header size: 29
679 bundle2-input: part type: "test:song"
679 bundle2-input: part type: "test:song"
680 bundle2-input: part id: "5"
680 bundle2-input: part id: "5"
681 bundle2-input: part parameters: 1
681 bundle2-input: part parameters: 1
682 bundle2-input: found a handler for part 'test:song'
682 bundle2-input: found a handler for part 'test:song'
683 bundle2-input: ignoring unsupported advisory part test:song - randomparam
683 bundle2-input: ignoring unsupported advisory part test:song - randomparam
684 bundle2-input-part: "test:song" (advisory) (params: 1 mandatory) unsupported-params (['randomparam'])
684 bundle2-input-part: "test:song" (advisory) (params: 1 mandatory) unsupported-params (['randomparam'])
685 bundle2-input: payload chunk size: 0
685 bundle2-input: payload chunk size: 0
686 bundle2-input: part header size: 16
686 bundle2-input: part header size: 16
687 bundle2-input: part type: "test:ping"
687 bundle2-input: part type: "test:ping"
688 bundle2-input: part id: "6"
688 bundle2-input: part id: "6"
689 bundle2-input: part parameters: 0
689 bundle2-input: part parameters: 0
690 bundle2-input: found a handler for part 'test:ping'
690 bundle2-input: found a handler for part 'test:ping'
691 bundle2-input-part: "test:ping" (advisory) supported
691 bundle2-input-part: "test:ping" (advisory) supported
692 received ping request (id 6)
692 received ping request (id 6)
693 bundle2-input: payload chunk size: 0
693 bundle2-input: payload chunk size: 0
694 bundle2-input: part header size: 0
694 bundle2-input: part header size: 0
695 bundle2-input: end of bundle2 stream
695 bundle2-input: end of bundle2 stream
696 bundle2-input-bundle: 6 parts total
696 bundle2-input-bundle: 6 parts total
697 0 unread bytes
697 0 unread bytes
698 3 total verses sung
698 3 total verses sung
699
699
700 Unbundle with an unknown mandatory part
700 Unbundle with an unknown mandatory part
701 (should abort)
701 (should abort)
702
702
703 $ hg bundle2 --parts --unknown ../unknown.hg2
703 $ hg bundle2 --parts --unknown ../unknown.hg2
704
704
705 $ hg unbundle2 < ../unknown.hg2
705 $ hg unbundle2 < ../unknown.hg2
706 The choir starts singing:
706 The choir starts singing:
707 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
707 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
708 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
708 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
709 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
709 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
710 debugreply: no reply
710 debugreply: no reply
711 0 unread bytes
711 0 unread bytes
712 abort: missing support for test:unknown
712 abort: missing support for test:unknown
713 [255]
713 [255]
714
714
715 Unbundle with an unknown mandatory part parameters
715 Unbundle with an unknown mandatory part parameters
716 (should abort)
716 (should abort)
717
717
718 $ hg bundle2 --unknownparams ../unknown.hg2
718 $ hg bundle2 --unknownparams ../unknown.hg2
719
719
720 $ hg unbundle2 < ../unknown.hg2
720 $ hg unbundle2 < ../unknown.hg2
721 0 unread bytes
721 0 unread bytes
722 abort: missing support for test:song - randomparams
722 abort: missing support for test:song - randomparams
723 [255]
723 [255]
724
724
725 unbundle with a reply
725 unbundle with a reply
726
726
727 $ hg bundle2 --parts --reply ../parts-reply.hg2
727 $ hg bundle2 --parts --reply ../parts-reply.hg2
728 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
728 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
729 0 unread bytes
729 0 unread bytes
730 3 total verses sung
730 3 total verses sung
731
731
732 The reply is a bundle
732 The reply is a bundle
733
733
734 $ f --hexdump ../reply.hg2
734 $ f --hexdump ../reply.hg2
735 ../reply.hg2:
735 ../reply.hg2:
736 0000: 48 47 32 30 00 00 00 00 00 00 00 1b 06 6f 75 74 |HG20.........out|
736 0000: 48 47 32 30 00 00 00 00 00 00 00 1b 06 6f 75 74 |HG20.........out|
737 0010: 70 75 74 00 00 00 00 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
737 0010: 70 75 74 00 00 00 00 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
738 0020: 70 6c 79 2d 74 6f 33 00 00 00 d9 54 68 65 20 63 |ply-to3....The c|
738 0020: 70 6c 79 2d 74 6f 33 00 00 00 d9 54 68 65 20 63 |ply-to3....The c|
739 0030: 68 6f 69 72 20 73 74 61 72 74 73 20 73 69 6e 67 |hoir starts sing|
739 0030: 68 6f 69 72 20 73 74 61 72 74 73 20 73 69 6e 67 |hoir starts sing|
740 0040: 69 6e 67 3a 0a 20 20 20 20 50 61 74 61 6c 69 20 |ing:. Patali |
740 0040: 69 6e 67 3a 0a 20 20 20 20 50 61 74 61 6c 69 20 |ing:. Patali |
741 0050: 44 69 72 61 70 61 74 61 2c 20 43 72 6f 6d 64 61 |Dirapata, Cromda|
741 0050: 44 69 72 61 70 61 74 61 2c 20 43 72 6f 6d 64 61 |Dirapata, Cromda|
742 0060: 20 43 72 6f 6d 64 61 20 52 69 70 61 6c 6f 2c 20 | Cromda Ripalo, |
742 0060: 20 43 72 6f 6d 64 61 20 52 69 70 61 6c 6f 2c 20 | Cromda Ripalo, |
743 0070: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
743 0070: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
744 0080: 20 4b 6f 0a 20 20 20 20 42 6f 6b 6f 72 6f 20 44 | Ko. Bokoro D|
744 0080: 20 4b 6f 0a 20 20 20 20 42 6f 6b 6f 72 6f 20 44 | Ko. Bokoro D|
745 0090: 69 70 6f 75 6c 69 74 6f 2c 20 52 6f 6e 64 69 20 |ipoulito, Rondi |
745 0090: 69 70 6f 75 6c 69 74 6f 2c 20 52 6f 6e 64 69 20 |ipoulito, Rondi |
746 00a0: 52 6f 6e 64 69 20 50 65 70 69 6e 6f 2c 20 50 61 |Rondi Pepino, Pa|
746 00a0: 52 6f 6e 64 69 20 50 65 70 69 6e 6f 2c 20 50 61 |Rondi Pepino, Pa|
747 00b0: 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 4b |ta Pata, Ko Ko K|
747 00b0: 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 4b |ta Pata, Ko Ko K|
748 00c0: 6f 0a 20 20 20 20 45 6d 61 6e 61 20 4b 61 72 61 |o. Emana Kara|
748 00c0: 6f 0a 20 20 20 20 45 6d 61 6e 61 20 4b 61 72 61 |o. Emana Kara|
749 00d0: 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c 6f |ssoli, Loucra Lo|
749 00d0: 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c 6f |ssoli, Loucra Lo|
750 00e0: 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 50 |ucra Ponponto, P|
750 00e0: 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 50 |ucra Ponponto, P|
751 00f0: 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 |ata Pata, Ko Ko |
751 00f0: 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 |ata Pata, Ko Ko |
752 0100: 4b 6f 2e 0a 00 00 00 00 00 00 00 1b 06 6f 75 74 |Ko...........out|
752 0100: 4b 6f 2e 0a 00 00 00 00 00 00 00 1b 06 6f 75 74 |Ko...........out|
753 0110: 70 75 74 00 00 00 01 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
753 0110: 70 75 74 00 00 00 01 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
754 0120: 70 6c 79 2d 74 6f 34 00 00 00 c9 64 65 62 75 67 |ply-to4....debug|
754 0120: 70 6c 79 2d 74 6f 34 00 00 00 c9 64 65 62 75 67 |ply-to4....debug|
755 0130: 72 65 70 6c 79 3a 20 63 61 70 61 62 69 6c 69 74 |reply: capabilit|
755 0130: 72 65 70 6c 79 3a 20 63 61 70 61 62 69 6c 69 74 |reply: capabilit|
756 0140: 69 65 73 3a 0a 64 65 62 75 67 72 65 70 6c 79 3a |ies:.debugreply:|
756 0140: 69 65 73 3a 0a 64 65 62 75 67 72 65 70 6c 79 3a |ies:.debugreply:|
757 0150: 20 20 20 20 20 27 63 69 74 79 3d 21 27 0a 64 65 | 'city=!'.de|
757 0150: 20 20 20 20 20 27 63 69 74 79 3d 21 27 0a 64 65 | 'city=!'.de|
758 0160: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
758 0160: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
759 0170: 20 20 27 63 65 6c 65 73 74 65 2c 76 69 6c 6c 65 | 'celeste,ville|
759 0170: 20 20 27 63 65 6c 65 73 74 65 2c 76 69 6c 6c 65 | 'celeste,ville|
760 0180: 27 0a 64 65 62 75 67 72 65 70 6c 79 3a 20 20 20 |'.debugreply: |
760 0180: 27 0a 64 65 62 75 67 72 65 70 6c 79 3a 20 20 20 |'.debugreply: |
761 0190: 20 20 27 65 6c 65 70 68 61 6e 74 73 27 0a 64 65 | 'elephants'.de|
761 0190: 20 20 27 65 6c 65 70 68 61 6e 74 73 27 0a 64 65 | 'elephants'.de|
762 01a0: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
762 01a0: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
763 01b0: 20 20 27 62 61 62 61 72 27 0a 64 65 62 75 67 72 | 'babar'.debugr|
763 01b0: 20 20 27 62 61 62 61 72 27 0a 64 65 62 75 67 72 | 'babar'.debugr|
764 01c0: 65 70 6c 79 3a 20 20 20 20 20 20 20 20 20 27 63 |eply: 'c|
764 01c0: 65 70 6c 79 3a 20 20 20 20 20 20 20 20 20 27 63 |eply: 'c|
765 01d0: 65 6c 65 73 74 65 27 0a 64 65 62 75 67 72 65 70 |eleste'.debugrep|
765 01d0: 65 6c 65 73 74 65 27 0a 64 65 62 75 67 72 65 70 |eleste'.debugrep|
766 01e0: 6c 79 3a 20 20 20 20 20 27 70 69 6e 67 2d 70 6f |ly: 'ping-po|
766 01e0: 6c 79 3a 20 20 20 20 20 27 70 69 6e 67 2d 70 6f |ly: 'ping-po|
767 01f0: 6e 67 27 0a 00 00 00 00 00 00 00 1e 09 74 65 73 |ng'..........tes|
767 01f0: 6e 67 27 0a 00 00 00 00 00 00 00 1e 09 74 65 73 |ng'..........tes|
768 0200: 74 3a 70 6f 6e 67 00 00 00 02 01 00 0b 01 69 6e |t:pong........in|
768 0200: 74 3a 70 6f 6e 67 00 00 00 02 01 00 0b 01 69 6e |t:pong........in|
769 0210: 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 00 00 00 |-reply-to7......|
769 0210: 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 00 00 00 |-reply-to7......|
770 0220: 00 1b 06 6f 75 74 70 75 74 00 00 00 03 00 01 0b |...output.......|
770 0220: 00 1b 06 6f 75 74 70 75 74 00 00 00 03 00 01 0b |...output.......|
771 0230: 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 |.in-reply-to7...|
771 0230: 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 |.in-reply-to7...|
772 0240: 3d 72 65 63 65 69 76 65 64 20 70 69 6e 67 20 72 |=received ping r|
772 0240: 3d 72 65 63 65 69 76 65 64 20 70 69 6e 67 20 72 |=received ping r|
773 0250: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 72 65 |equest (id 7).re|
773 0250: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 72 65 |equest (id 7).re|
774 0260: 70 6c 79 69 6e 67 20 74 6f 20 70 69 6e 67 20 72 |plying to ping r|
774 0260: 70 6c 79 69 6e 67 20 74 6f 20 70 69 6e 67 20 72 |plying to ping r|
775 0270: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 00 00 |equest (id 7)...|
775 0270: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 00 00 |equest (id 7)...|
776 0280: 00 00 00 00 00 00 |......|
776 0280: 00 00 00 00 00 00 |......|
777
777
778 The reply is valid
778 The reply is valid
779
779
780 $ hg statbundle2 < ../reply.hg2
780 $ hg statbundle2 < ../reply.hg2
781 options count: 0
781 options count: 0
782 :output:
782 :output:
783 mandatory: 0
783 mandatory: 0
784 advisory: 1
784 advisory: 1
785 payload: 217 bytes
785 payload: 217 bytes
786 :output:
786 :output:
787 mandatory: 0
787 mandatory: 0
788 advisory: 1
788 advisory: 1
789 payload: 201 bytes
789 payload: 201 bytes
790 :test:pong:
790 :test:pong:
791 mandatory: 1
791 mandatory: 1
792 advisory: 0
792 advisory: 0
793 payload: 0 bytes
793 payload: 0 bytes
794 :output:
794 :output:
795 mandatory: 0
795 mandatory: 0
796 advisory: 1
796 advisory: 1
797 payload: 61 bytes
797 payload: 61 bytes
798 parts count: 4
798 parts count: 4
799
799
800 Unbundle the reply to get the output:
800 Unbundle the reply to get the output:
801
801
802 $ hg unbundle2 < ../reply.hg2
802 $ hg unbundle2 < ../reply.hg2
803 remote: The choir starts singing:
803 remote: The choir starts singing:
804 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
804 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
805 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
805 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
806 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
806 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
807 remote: debugreply: capabilities:
807 remote: debugreply: capabilities:
808 remote: debugreply: 'city=!'
808 remote: debugreply: 'city=!'
809 remote: debugreply: 'celeste,ville'
809 remote: debugreply: 'celeste,ville'
810 remote: debugreply: 'elephants'
810 remote: debugreply: 'elephants'
811 remote: debugreply: 'babar'
811 remote: debugreply: 'babar'
812 remote: debugreply: 'celeste'
812 remote: debugreply: 'celeste'
813 remote: debugreply: 'ping-pong'
813 remote: debugreply: 'ping-pong'
814 remote: received ping request (id 7)
814 remote: received ping request (id 7)
815 remote: replying to ping request (id 7)
815 remote: replying to ping request (id 7)
816 0 unread bytes
816 0 unread bytes
817
817
818 Test push race detection
818 Test push race detection
819
819
820 $ hg bundle2 --pushrace ../part-race.hg2
820 $ hg bundle2 --pushrace ../part-race.hg2
821
821
822 $ hg unbundle2 < ../part-race.hg2
822 $ hg unbundle2 < ../part-race.hg2
823 0 unread bytes
823 0 unread bytes
824 abort: push race: repository changed while pushing - please try again
824 abort: push race: repository changed while pushing - please try again
825 [255]
825 [255]
826
826
827 Support for changegroup
827 Support for changegroup
828 ===================================
828 ===================================
829
829
830 $ hg unbundle $TESTDIR/bundles/rebase.hg
830 $ hg unbundle $TESTDIR/bundles/rebase.hg
831 adding changesets
831 adding changesets
832 adding manifests
832 adding manifests
833 adding file changes
833 adding file changes
834 added 8 changesets with 7 changes to 7 files (+3 heads)
834 added 8 changesets with 7 changes to 7 files (+3 heads)
835 (run 'hg heads' to see heads, 'hg merge' to merge)
835 (run 'hg heads' to see heads, 'hg merge' to merge)
836
836
837 $ hg log -G
837 $ hg log -G
838 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
838 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
839 |
839 |
840 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
840 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
841 |/|
841 |/|
842 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
842 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
843 | |
843 | |
844 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
844 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
845 |/
845 |/
846 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
846 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
847 | |
847 | |
848 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
848 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
849 | |
849 | |
850 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
850 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
851 |/
851 |/
852 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
852 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
853
853
854 @ 0:3903775176ed draft test a
854 @ 0:3903775176ed draft test a
855
855
856
856
857 $ hg bundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true --rev '8+7+5+4' ../rev.hg2
857 $ hg bundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true --rev '8+7+5+4' ../rev.hg2
858 4 changesets found
858 4 changesets found
859 list of changesets:
859 list of changesets:
860 32af7686d403cf45b5d95f2d70cebea587ac806a
860 32af7686d403cf45b5d95f2d70cebea587ac806a
861 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
861 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
862 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
862 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
863 02de42196ebee42ef284b6780a87cdc96e8eaab6
863 02de42196ebee42ef284b6780a87cdc96e8eaab6
864 bundle2-output-bundle: "HG20", 1 parts total
864 bundle2-output-bundle: "HG20", 1 parts total
865 bundle2-output: start emission of HG20 stream
865 bundle2-output: start emission of HG20 stream
866 bundle2-output: bundle parameter:
866 bundle2-output: bundle parameter:
867 bundle2-output: start of parts
867 bundle2-output: start of parts
868 bundle2-output: bundle part: "changegroup"
868 bundle2-output: bundle part: "changegroup"
869 bundle2-output-part: "changegroup" (advisory) streamed payload
869 bundle2-output-part: "changegroup" (advisory) streamed payload
870 bundle2-output: part 0: "changegroup"
870 bundle2-output: part 0: "changegroup"
871 bundle2-output: header chunk size: 18
871 bundle2-output: header chunk size: 18
872 bundling: 1/4 changesets (25.00%)
872 bundling: 1/4 changesets (25.00%)
873 bundling: 2/4 changesets (50.00%)
873 bundling: 2/4 changesets (50.00%)
874 bundling: 3/4 changesets (75.00%)
874 bundling: 3/4 changesets (75.00%)
875 bundling: 4/4 changesets (100.00%)
875 bundling: 4/4 changesets (100.00%)
876 bundling: 1/4 manifests (25.00%)
876 bundling: 1/4 manifests (25.00%)
877 bundling: 2/4 manifests (50.00%)
877 bundling: 2/4 manifests (50.00%)
878 bundling: 3/4 manifests (75.00%)
878 bundling: 3/4 manifests (75.00%)
879 bundling: 4/4 manifests (100.00%)
879 bundling: 4/4 manifests (100.00%)
880 bundling: D 1/3 files (33.33%)
880 bundling: D 1/3 files (33.33%)
881 bundling: E 2/3 files (66.67%)
881 bundling: E 2/3 files (66.67%)
882 bundling: H 3/3 files (100.00%)
882 bundling: H 3/3 files (100.00%)
883 bundle2-output: payload chunk size: 1555
883 bundle2-output: payload chunk size: 1555
884 bundle2-output: closing payload chunk
884 bundle2-output: closing payload chunk
885 bundle2-output: end of bundle
885 bundle2-output: end of bundle
886
886
887 $ f --hexdump ../rev.hg2
887 $ f --hexdump ../rev.hg2
888 ../rev.hg2:
888 ../rev.hg2:
889 0000: 48 47 32 30 00 00 00 00 00 00 00 12 0b 63 68 61 |HG20.........cha|
889 0000: 48 47 32 30 00 00 00 00 00 00 00 12 0b 63 68 61 |HG20.........cha|
890 0010: 6e 67 65 67 72 6f 75 70 00 00 00 00 00 00 00 00 |ngegroup........|
890 0010: 6e 67 65 67 72 6f 75 70 00 00 00 00 00 00 00 00 |ngegroup........|
891 0020: 06 13 00 00 00 a4 32 af 76 86 d4 03 cf 45 b5 d9 |......2.v....E..|
891 0020: 06 13 00 00 00 a4 32 af 76 86 d4 03 cf 45 b5 d9 |......2.v....E..|
892 0030: 5f 2d 70 ce be a5 87 ac 80 6a 5f dd d9 89 57 c8 |_-p......j_...W.|
892 0030: 5f 2d 70 ce be a5 87 ac 80 6a 5f dd d9 89 57 c8 |_-p......j_...W.|
893 0040: a5 4a 4d 43 6d fe 1d a9 d8 7f 21 a1 b9 7b 00 00 |.JMCm.....!..{..|
893 0040: a5 4a 4d 43 6d fe 1d a9 d8 7f 21 a1 b9 7b 00 00 |.JMCm.....!..{..|
894 0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
894 0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
895 0060: 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d 70 ce |..2.v....E.._-p.|
895 0060: 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d 70 ce |..2.v....E.._-p.|
896 0070: be a5 87 ac 80 6a 00 00 00 00 00 00 00 29 00 00 |.....j.......)..|
896 0070: be a5 87 ac 80 6a 00 00 00 00 00 00 00 29 00 00 |.....j.......)..|
897 0080: 00 29 36 65 31 66 34 63 34 37 65 63 62 35 33 33 |.)6e1f4c47ecb533|
897 0080: 00 29 36 65 31 66 34 63 34 37 65 63 62 35 33 33 |.)6e1f4c47ecb533|
898 0090: 66 66 64 30 63 38 65 35 32 63 64 63 38 38 61 66 |ffd0c8e52cdc88af|
898 0090: 66 66 64 30 63 38 65 35 32 63 64 63 38 38 61 66 |ffd0c8e52cdc88af|
899 00a0: 62 36 63 64 33 39 65 32 30 63 0a 00 00 00 66 00 |b6cd39e20c....f.|
899 00a0: 62 36 63 64 33 39 65 32 30 63 0a 00 00 00 66 00 |b6cd39e20c....f.|
900 00b0: 00 00 68 00 00 00 02 44 0a 00 00 00 69 00 00 00 |..h....D....i...|
900 00b0: 00 00 68 00 00 00 02 44 0a 00 00 00 69 00 00 00 |..h....D....i...|
901 00c0: 6a 00 00 00 01 44 00 00 00 a4 95 20 ee a7 81 bc |j....D..... ....|
901 00c0: 6a 00 00 00 01 44 00 00 00 a4 95 20 ee a7 81 bc |j....D..... ....|
902 00d0: ca 16 c1 e1 5a cc 0b a1 43 35 a0 e8 e5 ba cd 01 |....Z...C5......|
902 00d0: ca 16 c1 e1 5a cc 0b a1 43 35 a0 e8 e5 ba cd 01 |....Z...C5......|
903 00e0: 0b 8c d9 98 f3 98 1a 5a 81 15 f9 4f 8d a4 ab 50 |.......Z...O...P|
903 00e0: 0b 8c d9 98 f3 98 1a 5a 81 15 f9 4f 8d a4 ab 50 |.......Z...O...P|
904 00f0: 60 89 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |`...............|
904 00f0: 60 89 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |`...............|
905 0100: 00 00 00 00 00 00 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
905 0100: 00 00 00 00 00 00 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
906 0110: 5a cc 0b a1 43 35 a0 e8 e5 ba 00 00 00 00 00 00 |Z...C5..........|
906 0110: 5a cc 0b a1 43 35 a0 e8 e5 ba 00 00 00 00 00 00 |Z...C5..........|
907 0120: 00 29 00 00 00 29 34 64 65 63 65 39 63 38 32 36 |.)...)4dece9c826|
907 0120: 00 29 00 00 00 29 34 64 65 63 65 39 63 38 32 36 |.)...)4dece9c826|
908 0130: 66 36 39 34 39 30 35 30 37 62 39 38 63 36 33 38 |f69490507b98c638|
908 0130: 66 36 39 34 39 30 35 30 37 62 39 38 63 36 33 38 |f69490507b98c638|
909 0140: 33 61 33 30 30 39 62 32 39 35 38 33 37 64 0a 00 |3a3009b295837d..|
909 0140: 33 61 33 30 30 39 62 32 39 35 38 33 37 64 0a 00 |3a3009b295837d..|
910 0150: 00 00 66 00 00 00 68 00 00 00 02 45 0a 00 00 00 |..f...h....E....|
910 0150: 00 00 66 00 00 00 68 00 00 00 02 45 0a 00 00 00 |..f...h....E....|
911 0160: 69 00 00 00 6a 00 00 00 01 45 00 00 00 a2 ee a1 |i...j....E......|
911 0160: 69 00 00 00 6a 00 00 00 01 45 00 00 00 a2 ee a1 |i...j....E......|
912 0170: 37 46 79 9a 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f |7Fy.......<...8.|
912 0170: 37 46 79 9a 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f |7Fy.......<...8.|
913 0180: 52 4f 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 fa 95 |RO$.8|...7......|
913 0180: 52 4f 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 fa 95 |RO$.8|...7......|
914 0190: de d3 cb 1c f7 85 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
914 0190: de d3 cb 1c f7 85 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
915 01a0: 5a cc 0b a1 43 35 a0 e8 e5 ba ee a1 37 46 79 9a |Z...C5......7Fy.|
915 01a0: 5a cc 0b a1 43 35 a0 e8 e5 ba ee a1 37 46 79 9a |Z...C5......7Fy.|
916 01b0: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
916 01b0: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
917 01c0: 00 00 00 00 00 29 00 00 00 29 33 36 35 62 39 33 |.....)...)365b93|
917 01c0: 00 00 00 00 00 29 00 00 00 29 33 36 35 62 39 33 |.....)...)365b93|
918 01d0: 64 35 37 66 64 66 34 38 31 34 65 32 62 35 39 31 |d57fdf4814e2b591|
918 01d0: 64 35 37 66 64 66 34 38 31 34 65 32 62 35 39 31 |d57fdf4814e2b591|
919 01e0: 31 64 36 62 61 63 66 66 32 62 31 32 30 31 34 34 |1d6bacff2b120144|
919 01e0: 31 64 36 62 61 63 66 66 32 62 31 32 30 31 34 34 |1d6bacff2b120144|
920 01f0: 34 31 0a 00 00 00 66 00 00 00 68 00 00 00 00 00 |41....f...h.....|
920 01f0: 34 31 0a 00 00 00 66 00 00 00 68 00 00 00 00 00 |41....f...h.....|
921 0200: 00 00 69 00 00 00 6a 00 00 00 01 47 00 00 00 a4 |..i...j....G....|
921 0200: 00 00 69 00 00 00 6a 00 00 00 01 47 00 00 00 a4 |..i...j....G....|
922 0210: 02 de 42 19 6e be e4 2e f2 84 b6 78 0a 87 cd c9 |..B.n......x....|
922 0210: 02 de 42 19 6e be e4 2e f2 84 b6 78 0a 87 cd c9 |..B.n......x....|
923 0220: 6e 8e aa b6 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 |n...$.8|...7....|
923 0220: 6e 8e aa b6 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 |n...$.8|...7....|
924 0230: fa 95 de d3 cb 1c f7 85 00 00 00 00 00 00 00 00 |................|
924 0230: fa 95 de d3 cb 1c f7 85 00 00 00 00 00 00 00 00 |................|
925 0240: 00 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 |..............B.|
925 0240: 00 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 |..............B.|
926 0250: 6e be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 |n......x....n...|
926 0250: 6e be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 |n......x....n...|
927 0260: 00 00 00 00 00 00 00 29 00 00 00 29 38 62 65 65 |.......)...)8bee|
927 0260: 00 00 00 00 00 00 00 29 00 00 00 29 38 62 65 65 |.......)...)8bee|
928 0270: 34 38 65 64 63 37 33 31 38 35 34 31 66 63 30 30 |48edc7318541fc00|
928 0270: 34 38 65 64 63 37 33 31 38 35 34 31 66 63 30 30 |48edc7318541fc00|
929 0280: 31 33 65 65 34 31 62 30 38 39 32 37 36 61 38 63 |13ee41b089276a8c|
929 0280: 31 33 65 65 34 31 62 30 38 39 32 37 36 61 38 63 |13ee41b089276a8c|
930 0290: 32 34 62 66 0a 00 00 00 66 00 00 00 66 00 00 00 |24bf....f...f...|
930 0290: 32 34 62 66 0a 00 00 00 66 00 00 00 66 00 00 00 |24bf....f...f...|
931 02a0: 02 48 0a 00 00 00 67 00 00 00 68 00 00 00 01 48 |.H....g...h....H|
931 02a0: 02 48 0a 00 00 00 67 00 00 00 68 00 00 00 01 48 |.H....g...h....H|
932 02b0: 00 00 00 00 00 00 00 8b 6e 1f 4c 47 ec b5 33 ff |........n.LG..3.|
932 02b0: 00 00 00 00 00 00 00 8b 6e 1f 4c 47 ec b5 33 ff |........n.LG..3.|
933 02c0: d0 c8 e5 2c dc 88 af b6 cd 39 e2 0c 66 a5 a0 18 |...,.....9..f...|
933 02c0: d0 c8 e5 2c dc 88 af b6 cd 39 e2 0c 66 a5 a0 18 |...,.....9..f...|
934 02d0: 17 fd f5 23 9c 27 38 02 b5 b7 61 8d 05 1c 89 e4 |...#.'8...a.....|
934 02d0: 17 fd f5 23 9c 27 38 02 b5 b7 61 8d 05 1c 89 e4 |...#.'8...a.....|
935 02e0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
935 02e0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
936 02f0: 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d |....2.v....E.._-|
936 02f0: 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d |....2.v....E.._-|
937 0300: 70 ce be a5 87 ac 80 6a 00 00 00 81 00 00 00 81 |p......j........|
937 0300: 70 ce be a5 87 ac 80 6a 00 00 00 81 00 00 00 81 |p......j........|
938 0310: 00 00 00 2b 44 00 63 33 66 31 63 61 32 39 32 34 |...+D.c3f1ca2924|
938 0310: 00 00 00 2b 44 00 63 33 66 31 63 61 32 39 32 34 |...+D.c3f1ca2924|
939 0320: 63 31 36 61 31 39 62 30 36 35 36 61 38 34 39 30 |c16a19b0656a8490|
939 0320: 63 31 36 61 31 39 62 30 36 35 36 61 38 34 39 30 |c16a19b0656a8490|
940 0330: 30 65 35 30 34 65 35 62 30 61 65 63 32 64 0a 00 |0e504e5b0aec2d..|
940 0330: 30 65 35 30 34 65 35 62 30 61 65 63 32 64 0a 00 |0e504e5b0aec2d..|
941 0340: 00 00 8b 4d ec e9 c8 26 f6 94 90 50 7b 98 c6 38 |...M...&...P{..8|
941 0340: 00 00 8b 4d ec e9 c8 26 f6 94 90 50 7b 98 c6 38 |...M...&...P{..8|
942 0350: 3a 30 09 b2 95 83 7d 00 7d 8c 9d 88 84 13 25 f5 |:0....}.}.....%.|
942 0350: 3a 30 09 b2 95 83 7d 00 7d 8c 9d 88 84 13 25 f5 |:0....}.}.....%.|
943 0360: c6 b0 63 71 b3 5b 4e 8a 2b 1a 83 00 00 00 00 00 |..cq.[N.+.......|
943 0360: c6 b0 63 71 b3 5b 4e 8a 2b 1a 83 00 00 00 00 00 |..cq.[N.+.......|
944 0370: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 95 |................|
944 0370: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 95 |................|
945 0380: 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 a0 | ........Z...C5.|
945 0380: 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 a0 | ........Z...C5.|
946 0390: e8 e5 ba 00 00 00 2b 00 00 00 ac 00 00 00 2b 45 |......+.......+E|
946 0390: e8 e5 ba 00 00 00 2b 00 00 00 ac 00 00 00 2b 45 |......+.......+E|
947 03a0: 00 39 63 36 66 64 30 33 35 30 61 36 63 30 64 30 |.9c6fd0350a6c0d0|
947 03a0: 00 39 63 36 66 64 30 33 35 30 61 36 63 30 64 30 |.9c6fd0350a6c0d0|
948 03b0: 63 34 39 64 34 61 39 63 35 30 31 37 63 66 30 37 |c49d4a9c5017cf07|
948 03b0: 63 34 39 64 34 61 39 63 35 30 31 37 63 66 30 37 |c49d4a9c5017cf07|
949 03c0: 30 34 33 66 35 34 65 35 38 0a 00 00 00 8b 36 5b |043f54e58.....6[|
949 03c0: 30 34 33 66 35 34 65 35 38 0a 00 00 00 8b 36 5b |043f54e58.....6[|
950 03d0: 93 d5 7f df 48 14 e2 b5 91 1d 6b ac ff 2b 12 01 |....H.....k..+..|
950 03d0: 93 d5 7f df 48 14 e2 b5 91 1d 6b ac ff 2b 12 01 |....H.....k..+..|
951 03e0: 44 41 28 a5 84 c6 5e f1 21 f8 9e b6 6a b7 d0 bc |DA(...^.!...j...|
951 03e0: 44 41 28 a5 84 c6 5e f1 21 f8 9e b6 6a b7 d0 bc |DA(...^.!...j...|
952 03f0: 15 3d 80 99 e7 ce 4d ec e9 c8 26 f6 94 90 50 7b |.=....M...&...P{|
952 03f0: 15 3d 80 99 e7 ce 4d ec e9 c8 26 f6 94 90 50 7b |.=....M...&...P{|
953 0400: 98 c6 38 3a 30 09 b2 95 83 7d ee a1 37 46 79 9a |..8:0....}..7Fy.|
953 0400: 98 c6 38 3a 30 09 b2 95 83 7d ee a1 37 46 79 9a |..8:0....}..7Fy.|
954 0410: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
954 0410: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
955 0420: 00 56 00 00 00 56 00 00 00 2b 46 00 32 32 62 66 |.V...V...+F.22bf|
955 0420: 00 56 00 00 00 56 00 00 00 2b 46 00 32 32 62 66 |.V...V...+F.22bf|
956 0430: 63 66 64 36 32 61 32 31 61 33 32 38 37 65 64 62 |cfd62a21a3287edb|
956 0430: 63 66 64 36 32 61 32 31 61 33 32 38 37 65 64 62 |cfd62a21a3287edb|
957 0440: 64 34 64 36 35 36 32 31 38 64 30 66 35 32 35 65 |d4d656218d0f525e|
957 0440: 64 34 64 36 35 36 32 31 38 64 30 66 35 32 35 65 |d4d656218d0f525e|
958 0450: 64 37 36 61 0a 00 00 00 97 8b ee 48 ed c7 31 85 |d76a.......H..1.|
958 0450: 64 37 36 61 0a 00 00 00 97 8b ee 48 ed c7 31 85 |d76a.......H..1.|
959 0460: 41 fc 00 13 ee 41 b0 89 27 6a 8c 24 bf 28 a5 84 |A....A..'j.$.(..|
959 0460: 41 fc 00 13 ee 41 b0 89 27 6a 8c 24 bf 28 a5 84 |A....A..'j.$.(..|
960 0470: c6 5e f1 21 f8 9e b6 6a b7 d0 bc 15 3d 80 99 e7 |.^.!...j....=...|
960 0470: c6 5e f1 21 f8 9e b6 6a b7 d0 bc 15 3d 80 99 e7 |.^.!...j....=...|
961 0480: ce 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
961 0480: ce 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
962 0490: 00 00 00 00 00 02 de 42 19 6e be e4 2e f2 84 b6 |.......B.n......|
962 0490: 00 00 00 00 00 02 de 42 19 6e be e4 2e f2 84 b6 |.......B.n......|
963 04a0: 78 0a 87 cd c9 6e 8e aa b6 00 00 00 2b 00 00 00 |x....n......+...|
963 04a0: 78 0a 87 cd c9 6e 8e aa b6 00 00 00 2b 00 00 00 |x....n......+...|
964 04b0: 56 00 00 00 00 00 00 00 81 00 00 00 81 00 00 00 |V...............|
964 04b0: 56 00 00 00 00 00 00 00 81 00 00 00 81 00 00 00 |V...............|
965 04c0: 2b 48 00 38 35 30 30 31 38 39 65 37 34 61 39 65 |+H.8500189e74a9e|
965 04c0: 2b 48 00 38 35 30 30 31 38 39 65 37 34 61 39 65 |+H.8500189e74a9e|
966 04d0: 30 34 37 35 65 38 32 32 30 39 33 62 63 37 64 62 |0475e822093bc7db|
966 04d0: 30 34 37 35 65 38 32 32 30 39 33 62 63 37 64 62 |0475e822093bc7db|
967 04e0: 30 64 36 33 31 61 65 62 30 62 34 0a 00 00 00 00 |0d631aeb0b4.....|
967 04e0: 30 64 36 33 31 61 65 62 30 62 34 0a 00 00 00 00 |0d631aeb0b4.....|
968 04f0: 00 00 00 05 44 00 00 00 62 c3 f1 ca 29 24 c1 6a |....D...b...)$.j|
968 04f0: 00 00 00 05 44 00 00 00 62 c3 f1 ca 29 24 c1 6a |....D...b...)$.j|
969 0500: 19 b0 65 6a 84 90 0e 50 4e 5b 0a ec 2d 00 00 00 |..ej...PN[..-...|
969 0500: 19 b0 65 6a 84 90 0e 50 4e 5b 0a ec 2d 00 00 00 |..ej...PN[..-...|
970 0510: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
970 0510: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
971 0520: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
971 0520: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
972 0530: 00 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f |.....2.v....E.._|
972 0530: 00 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f |.....2.v....E.._|
973 0540: 2d 70 ce be a5 87 ac 80 6a 00 00 00 00 00 00 00 |-p......j.......|
973 0540: 2d 70 ce be a5 87 ac 80 6a 00 00 00 00 00 00 00 |-p......j.......|
974 0550: 00 00 00 00 02 44 0a 00 00 00 00 00 00 00 05 45 |.....D.........E|
974 0550: 00 00 00 00 02 44 0a 00 00 00 00 00 00 00 05 45 |.....D.........E|
975 0560: 00 00 00 62 9c 6f d0 35 0a 6c 0d 0c 49 d4 a9 c5 |...b.o.5.l..I...|
975 0560: 00 00 00 62 9c 6f d0 35 0a 6c 0d 0c 49 d4 a9 c5 |...b.o.5.l..I...|
976 0570: 01 7c f0 70 43 f5 4e 58 00 00 00 00 00 00 00 00 |.|.pC.NX........|
976 0570: 01 7c f0 70 43 f5 4e 58 00 00 00 00 00 00 00 00 |.|.pC.NX........|
977 0580: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
977 0580: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
978 0590: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
978 0590: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
979 05a0: 95 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 |. ........Z...C5|
979 05a0: 95 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 |. ........Z...C5|
980 05b0: a0 e8 e5 ba 00 00 00 00 00 00 00 00 00 00 00 02 |................|
980 05b0: a0 e8 e5 ba 00 00 00 00 00 00 00 00 00 00 00 02 |................|
981 05c0: 45 0a 00 00 00 00 00 00 00 05 48 00 00 00 62 85 |E.........H...b.|
981 05c0: 45 0a 00 00 00 00 00 00 00 05 48 00 00 00 62 85 |E.........H...b.|
982 05d0: 00 18 9e 74 a9 e0 47 5e 82 20 93 bc 7d b0 d6 31 |...t..G^. ..}..1|
982 05d0: 00 18 9e 74 a9 e0 47 5e 82 20 93 bc 7d b0 d6 31 |...t..G^. ..}..1|
983 05e0: ae b0 b4 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
983 05e0: ae b0 b4 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
984 05f0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
984 05f0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
985 0600: 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 6e |.............B.n|
985 0600: 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 6e |.............B.n|
986 0610: be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 00 |......x....n....|
986 0610: be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 00 |......x....n....|
987 0620: 00 00 00 00 00 00 00 00 00 00 02 48 0a 00 00 00 |...........H....|
987 0620: 00 00 00 00 00 00 00 00 00 00 02 48 0a 00 00 00 |...........H....|
988 0630: 00 00 00 00 00 00 00 00 00 00 00 00 00 |.............|
988 0630: 00 00 00 00 00 00 00 00 00 00 00 00 00 |.............|
989
989
990 $ hg debugbundle ../rev.hg2
990 $ hg debugbundle ../rev.hg2
991 Stream params: {}
991 Stream params: {}
992 changegroup -- 'sortdict()'
992 changegroup -- 'sortdict()'
993 32af7686d403cf45b5d95f2d70cebea587ac806a
993 32af7686d403cf45b5d95f2d70cebea587ac806a
994 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
994 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
995 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
995 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
996 02de42196ebee42ef284b6780a87cdc96e8eaab6
996 02de42196ebee42ef284b6780a87cdc96e8eaab6
997 $ hg unbundle ../rev.hg2
997 $ hg unbundle ../rev.hg2
998 adding changesets
998 adding changesets
999 adding manifests
999 adding manifests
1000 adding file changes
1000 adding file changes
1001 added 0 changesets with 0 changes to 3 files
1001 added 0 changesets with 0 changes to 3 files
1002 (run 'hg update' to get a working copy)
1002 (run 'hg update' to get a working copy)
1003
1003
1004 with reply
1004 with reply
1005
1005
1006 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
1006 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
1007 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
1007 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
1008 0 unread bytes
1008 0 unread bytes
1009 addchangegroup return: 1
1009 addchangegroup return: 1
1010
1010
1011 $ f --hexdump ../rev-reply.hg2
1011 $ f --hexdump ../rev-reply.hg2
1012 ../rev-reply.hg2:
1012 ../rev-reply.hg2:
1013 0000: 48 47 32 30 00 00 00 00 00 00 00 2f 11 72 65 70 |HG20......./.rep|
1013 0000: 48 47 32 30 00 00 00 00 00 00 00 2f 11 72 65 70 |HG20......./.rep|
1014 0010: 6c 79 3a 63 68 61 6e 67 65 67 72 6f 75 70 00 00 |ly:changegroup..|
1014 0010: 6c 79 3a 63 68 61 6e 67 65 67 72 6f 75 70 00 00 |ly:changegroup..|
1015 0020: 00 00 00 02 0b 01 06 01 69 6e 2d 72 65 70 6c 79 |........in-reply|
1015 0020: 00 00 00 02 0b 01 06 01 69 6e 2d 72 65 70 6c 79 |........in-reply|
1016 0030: 2d 74 6f 31 72 65 74 75 72 6e 31 00 00 00 00 00 |-to1return1.....|
1016 0030: 2d 74 6f 31 72 65 74 75 72 6e 31 00 00 00 00 00 |-to1return1.....|
1017 0040: 00 00 1b 06 6f 75 74 70 75 74 00 00 00 01 00 01 |....output......|
1017 0040: 00 00 1b 06 6f 75 74 70 75 74 00 00 00 01 00 01 |....output......|
1018 0050: 0b 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 31 00 00 |..in-reply-to1..|
1018 0050: 0b 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 31 00 00 |..in-reply-to1..|
1019 0060: 00 64 61 64 64 69 6e 67 20 63 68 61 6e 67 65 73 |.dadding changes|
1019 0060: 00 64 61 64 64 69 6e 67 20 63 68 61 6e 67 65 73 |.dadding changes|
1020 0070: 65 74 73 0a 61 64 64 69 6e 67 20 6d 61 6e 69 66 |ets.adding manif|
1020 0070: 65 74 73 0a 61 64 64 69 6e 67 20 6d 61 6e 69 66 |ets.adding manif|
1021 0080: 65 73 74 73 0a 61 64 64 69 6e 67 20 66 69 6c 65 |ests.adding file|
1021 0080: 65 73 74 73 0a 61 64 64 69 6e 67 20 66 69 6c 65 |ests.adding file|
1022 0090: 20 63 68 61 6e 67 65 73 0a 61 64 64 65 64 20 30 | changes.added 0|
1022 0090: 20 63 68 61 6e 67 65 73 0a 61 64 64 65 64 20 30 | changes.added 0|
1023 00a0: 20 63 68 61 6e 67 65 73 65 74 73 20 77 69 74 68 | changesets with|
1023 00a0: 20 63 68 61 6e 67 65 73 65 74 73 20 77 69 74 68 | changesets with|
1024 00b0: 20 30 20 63 68 61 6e 67 65 73 20 74 6f 20 33 20 | 0 changes to 3 |
1024 00b0: 20 30 20 63 68 61 6e 67 65 73 20 74 6f 20 33 20 | 0 changes to 3 |
1025 00c0: 66 69 6c 65 73 0a 00 00 00 00 00 00 00 00 |files.........|
1025 00c0: 66 69 6c 65 73 0a 00 00 00 00 00 00 00 00 |files.........|
1026
1026
1027 Check handling of exception during generation.
1027 Check handling of exception during generation.
1028 ----------------------------------------------
1028 ----------------------------------------------
1029
1029
1030 $ hg bundle2 --genraise > ../genfailed.hg2
1030 $ hg bundle2 --genraise > ../genfailed.hg2
1031 abort: Someone set up us the bomb!
1031 abort: Someone set up us the bomb!
1032 [255]
1032 [255]
1033
1033
1034 Should still be a valid bundle
1034 Should still be a valid bundle
1035
1035
1036 $ f --hexdump ../genfailed.hg2
1036 $ f --hexdump ../genfailed.hg2
1037 ../genfailed.hg2:
1037 ../genfailed.hg2:
1038 0000: 48 47 32 30 00 00 00 00 00 00 00 0d 06 6f 75 74 |HG20.........out|
1038 0000: 48 47 32 30 00 00 00 00 00 00 00 0d 06 6f 75 74 |HG20.........out|
1039 0010: 70 75 74 00 00 00 00 00 00 ff ff ff ff 00 00 00 |put.............|
1039 0010: 70 75 74 00 00 00 00 00 00 ff ff ff ff 00 00 00 |put.............|
1040 0020: 48 0b 65 72 72 6f 72 3a 61 62 6f 72 74 00 00 00 |H.error:abort...|
1040 0020: 48 0b 65 72 72 6f 72 3a 61 62 6f 72 74 00 00 00 |H.error:abort...|
1041 0030: 00 01 00 07 2d 6d 65 73 73 61 67 65 75 6e 65 78 |....-messageunex|
1041 0030: 00 01 00 07 2d 6d 65 73 73 61 67 65 75 6e 65 78 |....-messageunex|
1042 0040: 70 65 63 74 65 64 20 65 72 72 6f 72 3a 20 53 6f |pected error: So|
1042 0040: 70 65 63 74 65 64 20 65 72 72 6f 72 3a 20 53 6f |pected error: So|
1043 0050: 6d 65 6f 6e 65 20 73 65 74 20 75 70 20 75 73 20 |meone set up us |
1043 0050: 6d 65 6f 6e 65 20 73 65 74 20 75 70 20 75 73 20 |meone set up us |
1044 0060: 74 68 65 20 62 6f 6d 62 21 00 00 00 00 00 00 00 |the bomb!.......|
1044 0060: 74 68 65 20 62 6f 6d 62 21 00 00 00 00 00 00 00 |the bomb!.......|
1045 0070: 00 |.|
1045 0070: 00 |.|
1046
1046
1047 And its handling on the other size raise a clean exception
1047 And its handling on the other size raise a clean exception
1048
1048
1049 $ cat ../genfailed.hg2 | hg unbundle2
1049 $ cat ../genfailed.hg2 | hg unbundle2
1050 0 unread bytes
1050 0 unread bytes
1051 abort: unexpected error: Someone set up us the bomb!
1051 abort: unexpected error: Someone set up us the bomb!
1052 [255]
1052 [255]
1053
1053
1054 Test compression
1054 Test compression
1055 ================
1055 ================
1056
1056
1057 Simple case where it just work: GZ
1057 Simple case where it just work: GZ
1058 ----------------------------------
1058 ----------------------------------
1059
1059
1060 $ hg bundle2 --compress GZ --rev '8+7+5+4' ../rev.hg2.bz
1060 $ hg bundle2 --compress GZ --rev '8+7+5+4' ../rev.hg2.bz
1061 $ f --hexdump ../rev.hg2.bz
1061 $ f --hexdump ../rev.hg2.bz
1062 ../rev.hg2.bz:
1062 ../rev.hg2.bz:
1063 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1063 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1064 0010: 69 6f 6e 3d 47 5a 78 9c 95 94 7d 68 95 55 1c c7 |ion=GZx...}h.U..|
1064 0010: 69 6f 6e 3d 47 5a 78 9c 95 94 7d 68 95 55 1c c7 |ion=GZx...}h.U..|
1065 0020: 9f 3b 31 e8 ce fa c3 65 be a0 a4 b4 52 b9 29 e7 |.;1....e....R.).|
1065 0020: 9f 3b 31 e8 ce fa c3 65 be a0 a4 b4 52 b9 29 e7 |.;1....e....R.).|
1066 0030: f5 79 ce 89 fa 63 ed 5e 77 8b 9c c3 3f 2a 1c 68 |.y...c.^w...?*.h|
1066 0030: f5 79 ce 89 fa 63 ed 5e 77 8b 9c c3 3f 2a 1c 68 |.y...c.^w...?*.h|
1067 0040: cf 79 9b dd 6a ae b0 28 74 b8 e5 96 5b bb 86 61 |.y..j..(t...[..a|
1067 0040: cf 79 9b dd 6a ae b0 28 74 b8 e5 96 5b bb 86 61 |.y..j..(t...[..a|
1068 0050: a3 15 6e 3a 71 c8 6a e8 a5 da 95 64 28 22 ce 69 |..n:q.j....d(".i|
1068 0050: a3 15 6e 3a 71 c8 6a e8 a5 da 95 64 28 22 ce 69 |..n:q.j....d(".i|
1069 0060: cd 06 59 34 28 2b 51 2a 58 c3 17 56 2a 9a 9d 67 |..Y4(+Q*X..V*..g|
1069 0060: cd 06 59 34 28 2b 51 2a 58 c3 17 56 2a 9a 9d 67 |..Y4(+Q*X..V*..g|
1070 0070: dc c6 35 9e c4 1d f8 9e 87 f3 9c f3 3b bf 0f bf |..5.........;...|
1070 0070: dc c6 35 9e c4 1d f8 9e 87 f3 9c f3 3b bf 0f bf |..5.........;...|
1071 0080: 97 e3 38 ce f4 42 b9 d6 af ae d2 55 af ae 7b ad |..8..B.....U..{.|
1071 0080: 97 e3 38 ce f4 42 b9 d6 af ae d2 55 af ae 7b ad |..8..B.....U..{.|
1072 0090: c6 c9 8d bb 8a ec b4 07 ed 7f fd ed d3 53 be 4e |.............S.N|
1072 0090: c6 c9 8d bb 8a ec b4 07 ed 7f fd ed d3 53 be 4e |.............S.N|
1073 00a0: f4 0e af 59 52 73 ea 50 d7 96 9e ba d4 9a 1f 87 |...YRs.P........|
1073 00a0: f4 0e af 59 52 73 ea 50 d7 96 9e ba d4 9a 1f 87 |...YRs.P........|
1074 00b0: 9b 9f 1d e8 7a 6a 79 e9 cb 7f cf eb fe 7e d3 82 |....zjy......~..|
1074 00b0: 9b 9f 1d e8 7a 6a 79 e9 cb 7f cf eb fe 7e d3 82 |....zjy......~..|
1075 00c0: ce 2f 36 38 21 23 cc 36 b7 b5 38 90 ab a1 21 92 |./68!#.6..8...!.|
1075 00c0: ce 2f 36 38 21 23 cc 36 b7 b5 38 90 ab a1 21 92 |./68!#.6..8...!.|
1076 00d0: 78 5a 0a 8a b1 31 0a 48 a6 29 92 4a 32 e6 1b e1 |xZ...1.H.).J2...|
1076 00d0: 78 5a 0a 8a b1 31 0a 48 a6 29 92 4a 32 e6 1b e1 |xZ...1.H.).J2...|
1077 00e0: 4a 85 b9 46 40 46 ed 61 63 b5 d6 aa 20 1e ac 5e |J..F@F.ac... ..^|
1077 00e0: 4a 85 b9 46 40 46 ed 61 63 b5 d6 aa 20 1e ac 5e |J..F@F.ac... ..^|
1078 00f0: b0 0a ae 8a c4 03 c6 d6 f9 a3 7b eb fb 4e de 7f |..........{..N..|
1078 00f0: b0 0a ae 8a c4 03 c6 d6 f9 a3 7b eb fb 4e de 7f |..........{..N..|
1079 0100: e4 97 55 5f 15 76 96 d2 5d bf 9d 3f 38 18 29 4c |..U_.v..]..?8.)L|
1079 0100: e4 97 55 5f 15 76 96 d2 5d bf 9d 3f 38 18 29 4c |..U_.v..]..?8.)L|
1080 0110: 0f b7 5d 6e 9b b3 aa 7e c6 d5 15 5b f7 7c 52 f1 |..]n...~...[.|R.|
1080 0110: 0f b7 5d 6e 9b b3 aa 7e c6 d5 15 5b f7 7c 52 f1 |..]n...~...[.|R.|
1081 0120: 7c 73 18 63 98 6d 3e 23 51 5a 6a 2e 19 72 8d cb ||s.c.m>#QZj..r..|
1081 0120: 7c 73 18 63 98 6d 3e 23 51 5a 6a 2e 19 72 8d cb ||s.c.m>#QZj..r..|
1082 0130: 09 07 14 78 82 33 e9 62 86 7d 0c 00 17 88 53 86 |...x.3.b.}....S.|
1082 0130: 09 07 14 78 82 33 e9 62 86 7d 0c 00 17 88 53 86 |...x.3.b.}....S.|
1083 0140: 3d 75 0b 63 e2 16 c6 84 9d 76 8f 76 7a cb de fc |=u.c.....v.vz...|
1083 0140: 3d 75 0b 63 e2 16 c6 84 9d 76 8f 76 7a cb de fc |=u.c.....v.vz...|
1084 0150: a8 a3 f0 46 d3 a5 f6 c7 96 b6 9f 60 3b 57 ae 28 |...F.......`;W.(|
1084 0150: a8 a3 f0 46 d3 a5 f6 c7 96 b6 9f 60 3b 57 ae 28 |...F.......`;W.(|
1085 0160: ce b2 8d e9 f4 3e 6f 66 53 dd e5 6b ad 67 be f9 |.....>ofS..k.g..|
1085 0160: ce b2 8d e9 f4 3e 6f 66 53 dd e5 6b ad 67 be f9 |.....>ofS..k.g..|
1086 0170: 72 ee 5f 8d 61 3c 61 b6 f9 8c d8 a5 82 63 45 3d |r._.a<a......cE=|
1086 0170: 72 ee 5f 8d 61 3c 61 b6 f9 8c d8 a5 82 63 45 3d |r._.a<a......cE=|
1087 0180: a3 0c 61 90 68 24 28 87 50 b9 c2 97 c6 20 01 11 |..a.h$(.P.... ..|
1087 0180: a3 0c 61 90 68 24 28 87 50 b9 c2 97 c6 20 01 11 |..a.h$(.P.... ..|
1088 0190: 80 84 10 98 cf e8 e4 13 96 05 51 2c 38 f3 c4 ec |..........Q,8...|
1088 0190: 80 84 10 98 cf e8 e4 13 96 05 51 2c 38 f3 c4 ec |..........Q,8...|
1089 01a0: ea 43 e7 96 5e 6a c8 be 11 dd 32 78 a2 fa dd 8f |.C..^j....2x....|
1089 01a0: ea 43 e7 96 5e 6a c8 be 11 dd 32 78 a2 fa dd 8f |.C..^j....2x....|
1090 01b0: b3 61 84 61 51 0c b3 cd 27 64 42 6b c2 b4 92 1e |.a.aQ...'dBk....|
1090 01b0: b3 61 84 61 51 0c b3 cd 27 64 42 6b c2 b4 92 1e |.a.aQ...'dBk....|
1091 01c0: 86 8c 12 68 24 00 10 db 7f 50 00 c6 91 e7 fa 4c |...h$....P.....L|
1091 01c0: 86 8c 12 68 24 00 10 db 7f 50 00 c6 91 e7 fa 4c |...h$....P.....L|
1092 01d0: 22 22 cc bf 84 81 0a 92 c1 aa 2a c7 1b 49 e6 ee |""........*..I..|
1092 01d0: 22 22 cc bf 84 81 0a 92 c1 aa 2a c7 1b 49 e6 ee |""........*..I..|
1093 01e0: 6b a9 7e e0 e9 b2 91 5e 7c 73 68 e0 fc 23 3f 34 |k.~....^|sh..#?4|
1093 01e0: 6b a9 7e e0 e9 b2 91 5e 7c 73 68 e0 fc 23 3f 34 |k.~....^|sh..#?4|
1094 01f0: ed cf 0e f2 b3 d3 4c d7 ae 59 33 6f 8c 3d b8 63 |......L..Y3o.=.c|
1094 01f0: ed cf 0e f2 b3 d3 4c d7 ae 59 33 6f 8c 3d b8 63 |......L..Y3o.=.c|
1095 0200: 21 2b e8 3d e0 6f 9d 3a b7 f9 dc 24 2a b2 3e a7 |!+.=.o.:...$*.>.|
1095 0200: 21 2b e8 3d e0 6f 9d 3a b7 f9 dc 24 2a b2 3e a7 |!+.=.o.:...$*.>.|
1096 0210: 58 dc 91 d8 40 e9 23 8e 88 84 ae 0f b9 00 2e b5 |X...@.#.........|
1096 0210: 58 dc 91 d8 40 e9 23 8e 88 84 ae 0f b9 00 2e b5 |X...@.#.........|
1097 0220: 74 36 f3 40 53 40 34 15 c0 d7 12 8d e7 bb 65 f9 |t6.@S@4.......e.|
1097 0220: 74 36 f3 40 53 40 34 15 c0 d7 12 8d e7 bb 65 f9 |t6.@S@4.......e.|
1098 0230: c8 ef 03 0f ff f9 fe b6 8a 0d 6d fd ec 51 70 f7 |..........m..Qp.|
1098 0230: c8 ef 03 0f ff f9 fe b6 8a 0d 6d fd ec 51 70 f7 |..........m..Qp.|
1099 0240: a7 ad 9b 6b 9d da 74 7b 53 43 d1 43 63 fd 19 f9 |...k..t{SC.Cc...|
1099 0240: a7 ad 9b 6b 9d da 74 7b 53 43 d1 43 63 fd 19 f9 |...k..t{SC.Cc...|
1100 0250: ca 67 95 e5 ef c4 e6 6c 9e 44 e1 c5 ac 7a 82 6f |.g.....l.D...z.o|
1100 0250: ca 67 95 e5 ef c4 e6 6c 9e 44 e1 c5 ac 7a 82 6f |.g.....l.D...z.o|
1101 0260: c2 e1 d2 b5 2d 81 29 f0 5d 09 6c 6f 10 ae 88 cf |....-.).].lo....|
1101 0260: c2 e1 d2 b5 2d 81 29 f0 5d 09 6c 6f 10 ae 88 cf |....-.).].lo....|
1102 0270: 25 05 d0 93 06 78 80 60 43 2d 10 1b 47 71 2b b7 |%....x.`C-..Gq+.|
1102 0270: 25 05 d0 93 06 78 80 60 43 2d 10 1b 47 71 2b b7 |%....x.`C-..Gq+.|
1103 0280: 7f bb e9 a7 e4 7d 67 7b df 9b f7 62 cf cd d8 f4 |.....}g{...b....|
1103 0280: 7f bb e9 a7 e4 7d 67 7b df 9b f7 62 cf cd d8 f4 |.....}g{...b....|
1104 0290: 48 bc 64 51 57 43 ff ea 8b 0b ae 74 64 53 07 86 |H.dQWC.....tdS..|
1104 0290: 48 bc 64 51 57 43 ff ea 8b 0b ae 74 64 53 07 86 |H.dQWC.....tdS..|
1105 02a0: fa 66 3c 5e f7 e1 af a7 c2 90 ff a7 be 9e c9 29 |.f<^...........)|
1105 02a0: fa 66 3c 5e f7 e1 af a7 c2 90 ff a7 be 9e c9 29 |.f<^...........)|
1106 02b0: b6 cc 41 48 18 69 94 8b 7c 04 7d 8c 98 a7 95 50 |..AH.i..|.}....P|
1106 02b0: b6 cc 41 48 18 69 94 8b 7c 04 7d 8c 98 a7 95 50 |..AH.i..|.}....P|
1107 02c0: 44 d9 d0 20 c8 14 30 14 51 ad 6c 16 03 94 0f 5a |D.. ..0.Q.l....Z|
1107 02c0: 44 d9 d0 20 c8 14 30 14 51 ad 6c 16 03 94 0f 5a |D.. ..0.Q.l....Z|
1108 02d0: 46 93 7f 1c 87 8d 25 d7 9d a2 d1 92 4c f3 c2 54 |F.....%.....L..T|
1108 02d0: 46 93 7f 1c 87 8d 25 d7 9d a2 d1 92 4c f3 c2 54 |F.....%.....L..T|
1109 02e0: ba f8 70 18 ca 24 0a 29 96 43 71 f2 93 95 74 18 |..p..$.).Cq...t.|
1109 02e0: ba f8 70 18 ca 24 0a 29 96 43 71 f2 93 95 74 18 |..p..$.).Cq...t.|
1110 02f0: b5 65 c4 b8 f6 6c 5c 34 20 1e d5 0c 21 c0 b1 90 |.e...l\4 ...!...|
1110 02f0: b5 65 c4 b8 f6 6c 5c 34 20 1e d5 0c 21 c0 b1 90 |.e...l\4 ...!...|
1111 0300: 9e 12 40 b9 18 fa 5a 00 41 a2 39 d3 a9 c1 73 21 |..@...Z.A.9...s!|
1111 0300: 9e 12 40 b9 18 fa 5a 00 41 a2 39 d3 a9 c1 73 21 |..@...Z.A.9...s!|
1112 0310: 8e 5e 3c b9 b8 f8 48 6a 76 46 a7 1a b6 dd 5b 51 |.^<...HjvF....[Q|
1112 0310: 8e 5e 3c b9 b8 f8 48 6a 76 46 a7 1a b6 dd 5b 51 |.^<...HjvF....[Q|
1113 0320: 5e 19 1d 59 12 c6 32 89 02 9a c0 8f 4f b8 0a ba |^..Y..2.....O...|
1113 0320: 5e 19 1d 59 12 c6 32 89 02 9a c0 8f 4f b8 0a ba |^..Y..2.....O...|
1114 0330: 5e ec 58 37 44 a3 2f dd 33 ed c9 d3 dd c7 22 1b |^.X7D./.3.....".|
1114 0330: 5e ec 58 37 44 a3 2f dd 33 ed c9 d3 dd c7 22 1b |^.X7D./.3.....".|
1115 0340: 2f d4 94 8e 95 3f 77 a7 ae 6e f3 32 8d bb 4a 4c |/....?w..n.2..JL|
1115 0340: 2f d4 94 8e 95 3f 77 a7 ae 6e f3 32 8d bb 4a 4c |/....?w..n.2..JL|
1116 0350: b8 0a 5a 43 34 3a b3 3a d6 77 ff 5c b6 fa ad f9 |..ZC4:.:.w.\....|
1116 0350: b8 0a 5a 43 34 3a b3 3a d6 77 ff 5c b6 fa ad f9 |..ZC4:.:.w.\....|
1117 0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
1117 0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
1118 0370: 3b 19 fd af c5 3f f1 60 c3 17 |;....?.`..|
1118 0370: 3b 19 fd af c5 3f f1 60 c3 17 |;....?.`..|
1119 $ hg debugbundle ../rev.hg2.bz
1119 $ hg debugbundle ../rev.hg2.bz
1120 Stream params: sortdict([('Compression', 'GZ')])
1120 Stream params: sortdict([('Compression', 'GZ')])
1121 changegroup -- 'sortdict()'
1121 changegroup -- 'sortdict()'
1122 32af7686d403cf45b5d95f2d70cebea587ac806a
1122 32af7686d403cf45b5d95f2d70cebea587ac806a
1123 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1123 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1124 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1124 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1125 02de42196ebee42ef284b6780a87cdc96e8eaab6
1125 02de42196ebee42ef284b6780a87cdc96e8eaab6
1126 $ hg unbundle ../rev.hg2.bz
1126 $ hg unbundle ../rev.hg2.bz
1127 adding changesets
1127 adding changesets
1128 adding manifests
1128 adding manifests
1129 adding file changes
1129 adding file changes
1130 added 0 changesets with 0 changes to 3 files
1130 added 0 changesets with 0 changes to 3 files
1131 (run 'hg update' to get a working copy)
1131 (run 'hg update' to get a working copy)
1132 Simple case where it just work: BZ
1132 Simple case where it just work: BZ
1133 ----------------------------------
1133 ----------------------------------
1134
1134
1135 $ hg bundle2 --compress BZ --rev '8+7+5+4' ../rev.hg2.bz
1135 $ hg bundle2 --compress BZ --rev '8+7+5+4' ../rev.hg2.bz
1136 $ f --hexdump ../rev.hg2.bz
1136 $ f --hexdump ../rev.hg2.bz
1137 ../rev.hg2.bz:
1137 ../rev.hg2.bz:
1138 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1138 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1139 0010: 69 6f 6e 3d 42 5a 42 5a 68 39 31 41 59 26 53 59 |ion=BZBZh91AY&SY|
1139 0010: 69 6f 6e 3d 42 5a 42 5a 68 39 31 41 59 26 53 59 |ion=BZBZh91AY&SY|
1140 0020: a3 4b 18 3d 00 00 1a 7f ff ff bf 5f f6 ef ef 7f |.K.=......._....|
1140 0020: a3 4b 18 3d 00 00 1a 7f ff ff bf 5f f6 ef ef 7f |.K.=......._....|
1141 0030: f6 3f f7 d1 d9 ff ff f7 6e ff ff 6e f7 f6 bd df |.?......n..n....|
1141 0030: f6 3f f7 d1 d9 ff ff f7 6e ff ff 6e f7 f6 bd df |.?......n..n....|
1142 0040: b5 ab ff cf 67 f6 e7 7b f7 c0 02 d7 33 82 8b 51 |....g..{....3..Q|
1142 0040: b5 ab ff cf 67 f6 e7 7b f7 c0 02 d7 33 82 8b 51 |....g..{....3..Q|
1143 0050: 04 a5 53 d5 3d 27 a0 99 18 4d 0d 34 00 d1 a1 e8 |..S.='...M.4....|
1143 0050: 04 a5 53 d5 3d 27 a0 99 18 4d 0d 34 00 d1 a1 e8 |..S.='...M.4....|
1144 0060: 80 c8 7a 87 a9 a3 43 6a 3d 46 86 26 80 34 3d 40 |..z...Cj=F.&.4=@|
1144 0060: 80 c8 7a 87 a9 a3 43 6a 3d 46 86 26 80 34 3d 40 |..z...Cj=F.&.4=@|
1145 0070: c8 c9 b5 34 f4 8f 48 0f 51 ea 34 34 fd 4d aa 19 |...4..H.Q.44.M..|
1145 0070: c8 c9 b5 34 f4 8f 48 0f 51 ea 34 34 fd 4d aa 19 |...4..H.Q.44.M..|
1146 0080: 03 40 0c 08 da 86 43 d4 f5 0f 42 1e a0 f3 54 33 |.@....C...B...T3|
1146 0080: 03 40 0c 08 da 86 43 d4 f5 0f 42 1e a0 f3 54 33 |.@....C...B...T3|
1147 0090: 54 d3 13 4d 03 40 32 00 00 32 03 26 80 0d 00 0d |T..M.@2..2.&....|
1147 0090: 54 d3 13 4d 03 40 32 00 00 32 03 26 80 0d 00 0d |T..M.@2..2.&....|
1148 00a0: 00 68 c8 c8 03 20 32 30 98 8c 80 00 00 03 4d 00 |.h... 20......M.|
1148 00a0: 00 68 c8 c8 03 20 32 30 98 8c 80 00 00 03 4d 00 |.h... 20......M.|
1149 00b0: c8 00 00 0d 00 00 22 99 a1 34 c2 64 a6 d5 34 1a |......"..4.d..4.|
1149 00b0: c8 00 00 0d 00 00 22 99 a1 34 c2 64 a6 d5 34 1a |......"..4.d..4.|
1150 00c0: 00 00 06 86 83 4d 07 a8 d1 a0 68 01 a0 00 00 00 |.....M....h.....|
1150 00c0: 00 00 06 86 83 4d 07 a8 d1 a0 68 01 a0 00 00 00 |.....M....h.....|
1151 00d0: 00 0d 06 80 00 00 00 0d 00 03 40 00 00 04 a4 a1 |..........@.....|
1151 00d0: 00 0d 06 80 00 00 00 0d 00 03 40 00 00 04 a4 a1 |..........@.....|
1152 00e0: 4d a9 89 89 b4 9a 32 0c 43 46 86 87 a9 8d 41 9a |M.....2.CF....A.|
1152 00e0: 4d a9 89 89 b4 9a 32 0c 43 46 86 87 a9 8d 41 9a |M.....2.CF....A.|
1153 00f0: 98 46 9a 0d 31 32 1a 34 0d 0c 8d a2 0c 98 4d 06 |.F..12.4......M.|
1153 00f0: 98 46 9a 0d 31 32 1a 34 0d 0c 8d a2 0c 98 4d 06 |.F..12.4......M.|
1154 0100: 8c 40 c2 60 8d 0d 0c 20 c9 89 fa a0 d0 d3 21 a1 |.@.`... ......!.|
1154 0100: 8c 40 c2 60 8d 0d 0c 20 c9 89 fa a0 d0 d3 21 a1 |.@.`... ......!.|
1155 0110: ea 34 d3 68 9e a6 d1 74 05 33 cb 66 96 93 28 64 |.4.h...t.3.f..(d|
1155 0110: ea 34 d3 68 9e a6 d1 74 05 33 cb 66 96 93 28 64 |.4.h...t.3.f..(d|
1156 0120: 40 91 22 ac 55 9b ea 40 7b 38 94 e2 f8 06 00 cb |@.".U..@{8......|
1156 0120: 40 91 22 ac 55 9b ea 40 7b 38 94 e2 f8 06 00 cb |@.".U..@{8......|
1157 0130: 28 02 00 4d ab 40 24 10 43 18 cf 64 b4 06 83 0c |(..M.@$.C..d....|
1157 0130: 28 02 00 4d ab 40 24 10 43 18 cf 64 b4 06 83 0c |(..M.@$.C..d....|
1158 0140: 34 6c b4 a3 d4 0a 0a e4 a8 5c 4e 23 c0 c9 7a 31 |4l.......\N#..z1|
1158 0140: 34 6c b4 a3 d4 0a 0a e4 a8 5c 4e 23 c0 c9 7a 31 |4l.......\N#..z1|
1159 0150: 97 87 77 7a 64 88 80 8e 60 97 20 93 0f 8e eb c4 |..wzd...`. .....|
1159 0150: 97 87 77 7a 64 88 80 8e 60 97 20 93 0f 8e eb c4 |..wzd...`. .....|
1160 0160: 62 a4 44 a3 52 20 b2 99 a9 2e e1 d7 29 4a 54 ac |b.D.R ......)JT.|
1160 0160: 62 a4 44 a3 52 20 b2 99 a9 2e e1 d7 29 4a 54 ac |b.D.R ......)JT.|
1161 0170: 44 7a bb cc 04 3d e0 aa bd 6a 33 5e 9b a2 57 36 |Dz...=...j3^..W6|
1161 0170: 44 7a bb cc 04 3d e0 aa bd 6a 33 5e 9b a2 57 36 |Dz...=...j3^..W6|
1162 0180: fa cb 45 bb 6d 3e c1 d9 d9 f5 83 69 8a d0 e0 e2 |..E.m>.....i....|
1162 0180: fa cb 45 bb 6d 3e c1 d9 d9 f5 83 69 8a d0 e0 e2 |..E.m>.....i....|
1163 0190: e7 ae 90 55 24 da 3f ab 78 c0 4c b4 56 a3 9e a4 |...U$.?.x.L.V...|
1163 0190: e7 ae 90 55 24 da 3f ab 78 c0 4c b4 56 a3 9e a4 |...U$.?.x.L.V...|
1164 01a0: af 9c 65 74 86 ec 6d dc 62 dc 33 ca c8 50 dd 9d |..et..m.b.3..P..|
1164 01a0: af 9c 65 74 86 ec 6d dc 62 dc 33 ca c8 50 dd 9d |..et..m.b.3..P..|
1165 01b0: 98 8e 9e 59 20 f3 f0 42 91 4a 09 f5 75 8d 3d a5 |...Y ..B.J..u.=.|
1165 01b0: 98 8e 9e 59 20 f3 f0 42 91 4a 09 f5 75 8d 3d a5 |...Y ..B.J..u.=.|
1166 01c0: a5 15 cb 8d 10 63 b0 c2 2e b2 81 f7 c1 76 0e 53 |.....c.......v.S|
1166 01c0: a5 15 cb 8d 10 63 b0 c2 2e b2 81 f7 c1 76 0e 53 |.....c.......v.S|
1167 01d0: 6c 0e 46 73 b5 ae 67 f9 4c 0b 45 6b a8 32 2a 2f |l.Fs..g.L.Ek.2*/|
1167 01d0: 6c 0e 46 73 b5 ae 67 f9 4c 0b 45 6b a8 32 2a 2f |l.Fs..g.L.Ek.2*/|
1168 01e0: a2 54 a4 44 05 20 a1 38 d1 a4 c6 09 a8 2b 08 99 |.T.D. .8.....+..|
1168 01e0: a2 54 a4 44 05 20 a1 38 d1 a4 c6 09 a8 2b 08 99 |.T.D. .8.....+..|
1169 01f0: a4 14 ae 8d a3 e3 aa 34 27 d8 44 ca c3 5d 21 8b |.......4'.D..]!.|
1169 01f0: a4 14 ae 8d a3 e3 aa 34 27 d8 44 ca c3 5d 21 8b |.......4'.D..]!.|
1170 0200: 1a 1e 97 29 71 2b 09 4a 4a 55 55 94 58 65 b2 bc |...)q+.JJUU.Xe..|
1170 0200: 1a 1e 97 29 71 2b 09 4a 4a 55 55 94 58 65 b2 bc |...)q+.JJUU.Xe..|
1171 0210: f3 a5 90 26 36 76 67 7a 51 98 d6 8a 4a 99 50 b5 |...&6vgzQ...J.P.|
1171 0210: f3 a5 90 26 36 76 67 7a 51 98 d6 8a 4a 99 50 b5 |...&6vgzQ...J.P.|
1172 0220: 99 8f 94 21 17 a9 8b f3 ad 4c 33 d4 2e 40 c8 0c |...!.....L3..@..|
1172 0220: 99 8f 94 21 17 a9 8b f3 ad 4c 33 d4 2e 40 c8 0c |...!.....L3..@..|
1173 0230: 3b 90 53 39 db 48 02 34 83 48 d6 b3 99 13 d2 58 |;.S9.H.4.H.....X|
1173 0230: 3b 90 53 39 db 48 02 34 83 48 d6 b3 99 13 d2 58 |;.S9.H.4.H.....X|
1174 0240: 65 8e 71 ac a9 06 95 f2 c4 8e b4 08 6b d3 0c ae |e.q.........k...|
1174 0240: 65 8e 71 ac a9 06 95 f2 c4 8e b4 08 6b d3 0c ae |e.q.........k...|
1175 0250: d9 90 56 71 43 a7 a2 62 16 3e 50 63 d3 57 3c 2d |..VqC..b.>Pc.W<-|
1175 0250: d9 90 56 71 43 a7 a2 62 16 3e 50 63 d3 57 3c 2d |..VqC..b.>Pc.W<-|
1176 0260: 9f 0f 34 05 08 d8 a6 4b 59 31 54 66 3a 45 0c 8a |..4....KY1Tf:E..|
1176 0260: 9f 0f 34 05 08 d8 a6 4b 59 31 54 66 3a 45 0c 8a |..4....KY1Tf:E..|
1177 0270: c7 90 3a f0 6a 83 1b f5 ca fb 80 2b 50 06 fb 51 |..:.j......+P..Q|
1177 0270: c7 90 3a f0 6a 83 1b f5 ca fb 80 2b 50 06 fb 51 |..:.j......+P..Q|
1178 0280: 7e a6 a4 d4 81 44 82 21 54 00 5b 1a 30 83 62 a3 |~....D.!T.[.0.b.|
1178 0280: 7e a6 a4 d4 81 44 82 21 54 00 5b 1a 30 83 62 a3 |~....D.!T.[.0.b.|
1179 0290: 18 b6 24 19 1e 45 df 4d 5c db a6 af 5b ac 90 fa |..$..E.M\...[...|
1179 0290: 18 b6 24 19 1e 45 df 4d 5c db a6 af 5b ac 90 fa |..$..E.M\...[...|
1180 02a0: 3e ed f9 ec 4c ba 36 ee d8 60 20 a7 c7 3b cb d1 |>...L.6..` ..;..|
1180 02a0: 3e ed f9 ec 4c ba 36 ee d8 60 20 a7 c7 3b cb d1 |>...L.6..` ..;..|
1181 02b0: 90 43 7d 27 16 50 5d ad f4 14 07 0b 90 5c cc 6b |.C}'.P]......\.k|
1181 02b0: 90 43 7d 27 16 50 5d ad f4 14 07 0b 90 5c cc 6b |.C}'.P]......\.k|
1182 02c0: 8d 3f a6 88 f4 34 37 a8 cf 14 63 36 19 f7 3e 28 |.?...47...c6..>(|
1182 02c0: 8d 3f a6 88 f4 34 37 a8 cf 14 63 36 19 f7 3e 28 |.?...47...c6..>(|
1183 02d0: de 99 e8 16 a4 9d 0d 40 a1 a7 24 52 14 a6 72 62 |.......@..$R..rb|
1183 02d0: de 99 e8 16 a4 9d 0d 40 a1 a7 24 52 14 a6 72 62 |.......@..$R..rb|
1184 02e0: 59 5a ca 2d e5 51 90 78 88 d9 c6 c7 21 d0 f7 46 |YZ.-.Q.x....!..F|
1184 02e0: 59 5a ca 2d e5 51 90 78 88 d9 c6 c7 21 d0 f7 46 |YZ.-.Q.x....!..F|
1185 02f0: b2 04 46 44 4e 20 9c 12 b1 03 4e 25 e0 a9 0c 58 |..FDN ....N%...X|
1185 02f0: b2 04 46 44 4e 20 9c 12 b1 03 4e 25 e0 a9 0c 58 |..FDN ....N%...X|
1186 0300: 5b 1d 3c 93 20 01 51 de a9 1c 69 23 32 46 14 b4 |[.<. .Q...i#2F..|
1186 0300: 5b 1d 3c 93 20 01 51 de a9 1c 69 23 32 46 14 b4 |[.<. .Q...i#2F..|
1187 0310: 90 db 17 98 98 50 03 90 29 aa 40 b0 13 d8 43 d2 |.....P..).@...C.|
1187 0310: 90 db 17 98 98 50 03 90 29 aa 40 b0 13 d8 43 d2 |.....P..).@...C.|
1188 0320: 5f c5 9d eb f3 f2 ad 41 e8 7a a9 ed a1 58 84 a6 |_......A.z...X..|
1188 0320: 5f c5 9d eb f3 f2 ad 41 e8 7a a9 ed a1 58 84 a6 |_......A.z...X..|
1189 0330: 42 bf d6 fc 24 82 c1 20 32 26 4a 15 a6 1d 29 7f |B...$.. 2&J...).|
1189 0330: 42 bf d6 fc 24 82 c1 20 32 26 4a 15 a6 1d 29 7f |B...$.. 2&J...).|
1190 0340: 7e f4 3d 07 bc 62 9a 5b ec 44 3d 72 1d 41 8b 5c |~.=..b.[.D=r.A.\|
1190 0340: 7e f4 3d 07 bc 62 9a 5b ec 44 3d 72 1d 41 8b 5c |~.=..b.[.D=r.A.\|
1191 0350: 80 de 0e 62 9a 2e f8 83 00 d5 07 a0 9c c6 74 98 |...b..........t.|
1191 0350: 80 de 0e 62 9a 2e f8 83 00 d5 07 a0 9c c6 74 98 |...b..........t.|
1192 0360: 11 b2 5e a9 38 02 03 ee fd 86 5c f4 86 b3 ae da |..^.8.....\.....|
1192 0360: 11 b2 5e a9 38 02 03 ee fd 86 5c f4 86 b3 ae da |..^.8.....\.....|
1193 0370: 05 94 01 c5 c6 ea 18 e6 ba 2a ba b3 04 5c 96 89 |.........*...\..|
1193 0370: 05 94 01 c5 c6 ea 18 e6 ba 2a ba b3 04 5c 96 89 |.........*...\..|
1194 0380: 72 63 5b 10 11 f6 67 34 98 cb e4 c0 4e fa e6 99 |rc[...g4....N...|
1194 0380: 72 63 5b 10 11 f6 67 34 98 cb e4 c0 4e fa e6 99 |rc[...g4....N...|
1195 0390: 19 6e 50 e8 26 8d 0c 17 e0 be ef e1 8e 02 6f 32 |.nP.&.........o2|
1195 0390: 19 6e 50 e8 26 8d 0c 17 e0 be ef e1 8e 02 6f 32 |.nP.&.........o2|
1196 03a0: 82 dc 26 f8 a1 08 f3 8a 0d f3 c4 75 00 48 73 b8 |..&........u.Hs.|
1196 03a0: 82 dc 26 f8 a1 08 f3 8a 0d f3 c4 75 00 48 73 b8 |..&........u.Hs.|
1197 03b0: be 3b 0d 7f d0 fd c7 78 96 ec e0 03 80 68 4d 8d |.;.....x.....hM.|
1197 03b0: be 3b 0d 7f d0 fd c7 78 96 ec e0 03 80 68 4d 8d |.;.....x.....hM.|
1198 03c0: 43 8c d7 68 58 f9 50 f0 18 cb 21 58 1b 60 cd 1f |C..hX.P...!X.`..|
1198 03c0: 43 8c d7 68 58 f9 50 f0 18 cb 21 58 1b 60 cd 1f |C..hX.P...!X.`..|
1199 03d0: 84 36 2e 16 1f 0a f7 4e 8f eb df 01 2d c2 79 0b |.6.....N....-.y.|
1199 03d0: 84 36 2e 16 1f 0a f7 4e 8f eb df 01 2d c2 79 0b |.6.....N....-.y.|
1200 03e0: f7 24 ea 0d e8 59 86 51 6e 1c 30 a3 ad 2f ee 8c |.$...Y.Qn.0../..|
1200 03e0: f7 24 ea 0d e8 59 86 51 6e 1c 30 a3 ad 2f ee 8c |.$...Y.Qn.0../..|
1201 03f0: 90 c8 84 d5 e8 34 c1 95 b2 c9 f6 4d 87 1c 7d 19 |.....4.....M..}.|
1201 03f0: 90 c8 84 d5 e8 34 c1 95 b2 c9 f6 4d 87 1c 7d 19 |.....4.....M..}.|
1202 0400: d6 41 58 56 7a e0 6c ba 10 c7 e8 33 39 36 96 e7 |.AXVz.l....396..|
1202 0400: d6 41 58 56 7a e0 6c ba 10 c7 e8 33 39 36 96 e7 |.AXVz.l....396..|
1203 0410: d2 f9 59 9a 08 95 48 38 e7 0b b7 0a 24 67 c4 39 |..Y...H8....$g.9|
1203 0410: d2 f9 59 9a 08 95 48 38 e7 0b b7 0a 24 67 c4 39 |..Y...H8....$g.9|
1204 0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
1204 0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
1205 0430: 2e e4 8a 70 a1 21 46 96 30 7a |...p.!F.0z|
1205 0430: 2e e4 8a 70 a1 21 46 96 30 7a |...p.!F.0z|
1206 $ hg debugbundle ../rev.hg2.bz
1206 $ hg debugbundle ../rev.hg2.bz
1207 Stream params: sortdict([('Compression', 'BZ')])
1207 Stream params: sortdict([('Compression', 'BZ')])
1208 changegroup -- 'sortdict()'
1208 changegroup -- 'sortdict()'
1209 32af7686d403cf45b5d95f2d70cebea587ac806a
1209 32af7686d403cf45b5d95f2d70cebea587ac806a
1210 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1210 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1211 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1211 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1212 02de42196ebee42ef284b6780a87cdc96e8eaab6
1212 02de42196ebee42ef284b6780a87cdc96e8eaab6
1213 $ hg unbundle ../rev.hg2.bz
1213 $ hg unbundle ../rev.hg2.bz
1214 adding changesets
1214 adding changesets
1215 adding manifests
1215 adding manifests
1216 adding file changes
1216 adding file changes
1217 added 0 changesets with 0 changes to 3 files
1217 added 0 changesets with 0 changes to 3 files
1218 (run 'hg update' to get a working copy)
1218 (run 'hg update' to get a working copy)
1219
1219
1220 unknown compression while unbundling
1220 unknown compression while unbundling
1221 -----------------------------
1221 -----------------------------
1222
1222
1223 $ hg bundle2 --param Compression=FooBarUnknown --rev '8+7+5+4' ../rev.hg2.bz
1223 $ hg bundle2 --param Compression=FooBarUnknown --rev '8+7+5+4' ../rev.hg2.bz
1224 $ cat ../rev.hg2.bz | hg statbundle2
1224 $ cat ../rev.hg2.bz | hg statbundle2
1225 abort: unknown parameters: Stream Parameter - Compression='FooBarUnknown'
1225 abort: unknown parameters: Stream Parameter - Compression='FooBarUnknown'
1226 [255]
1226 [255]
1227 $ hg unbundle ../rev.hg2.bz
1227 $ hg unbundle ../rev.hg2.bz
1228 abort: ../rev.hg2.bz: unknown bundle feature, Stream Parameter - Compression='FooBarUnknown'
1228 abort: ../rev.hg2.bz: unknown bundle feature, Stream Parameter - Compression='FooBarUnknown'
1229 (see https://mercurial-scm.org/wiki/BundleFeature for more information)
1229 (see https://mercurial-scm.org/wiki/BundleFeature for more information)
1230 [255]
1230 [255]
1231
1231
1232 $ cd ..
1232 $ cd ..
@@ -1,258 +1,260 b''
1 Create an extension to test bundle2 with multiple changegroups
1 Create an extension to test bundle2 with multiple changegroups
2
2
3 $ cat > bundle2.py <<EOF
3 $ cat > bundle2.py <<EOF
4 > """
4 > """
5 > """
5 > """
6 > from mercurial import changegroup, discovery, exchange
6 > from mercurial import changegroup, discovery, exchange
7 >
7 >
8 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
8 > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
9 > b2caps=None, heads=None, common=None,
9 > b2caps=None, heads=None, common=None,
10 > **kwargs):
10 > **kwargs):
11 > # Create two changegroups given the common changesets and heads for the
11 > # Create two changegroups given the common changesets and heads for the
12 > # changegroup part we are being requested. Use the parent of each head
12 > # changegroup part we are being requested. Use the parent of each head
13 > # in 'heads' as intermediate heads for the first changegroup.
13 > # in 'heads' as intermediate heads for the first changegroup.
14 > intermediates = [repo[r].p1().node() for r in heads]
14 > intermediates = [repo[r].p1().node() for r in heads]
15 > outgoing = discovery.outgoing(repo, common, intermediates)
15 > outgoing = discovery.outgoing(repo, common, intermediates)
16 > cg = changegroup.getchangegroup(repo, source, outgoing)
16 > cg = changegroup.getchangegroup(repo, source, outgoing,
17 > bundlecaps=bundlecaps)
17 > bundler.newpart('output', data='changegroup1')
18 > bundler.newpart('output', data='changegroup1')
18 > bundler.newpart('changegroup', data=cg.getchunks())
19 > bundler.newpart('changegroup', data=cg.getchunks())
19 > outgoing = discovery.outgoing(repo, common + intermediates, heads)
20 > outgoing = discovery.outgoing(repo, common + intermediates, heads)
20 > cg = changegroup.getchangegroup(repo, source, outgoing)
21 > cg = changegroup.getchangegroup(repo, source, outgoing,
22 > bundlecaps=bundlecaps)
21 > bundler.newpart('output', data='changegroup2')
23 > bundler.newpart('output', data='changegroup2')
22 > bundler.newpart('changegroup', data=cg.getchunks())
24 > bundler.newpart('changegroup', data=cg.getchunks())
23 >
25 >
24 > def _pull(repo, *args, **kwargs):
26 > def _pull(repo, *args, **kwargs):
25 > pullop = _orig_pull(repo, *args, **kwargs)
27 > pullop = _orig_pull(repo, *args, **kwargs)
26 > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
28 > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult)
27 > return pullop
29 > return pullop
28 >
30 >
29 > _orig_pull = exchange.pull
31 > _orig_pull = exchange.pull
30 > exchange.pull = _pull
32 > exchange.pull = _pull
31 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
33 > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart
32 > EOF
34 > EOF
33
35
34 $ cat >> $HGRCPATH << EOF
36 $ cat >> $HGRCPATH << EOF
35 > [ui]
37 > [ui]
36 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
38 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
37 > EOF
39 > EOF
38
40
39 Start with a simple repository with a single commit
41 Start with a simple repository with a single commit
40
42
41 $ hg init repo
43 $ hg init repo
42 $ cd repo
44 $ cd repo
43 $ cat > .hg/hgrc << EOF
45 $ cat > .hg/hgrc << EOF
44 > [extensions]
46 > [extensions]
45 > bundle2=$TESTTMP/bundle2.py
47 > bundle2=$TESTTMP/bundle2.py
46 > EOF
48 > EOF
47
49
48 $ echo A > A
50 $ echo A > A
49 $ hg commit -A -m A -q
51 $ hg commit -A -m A -q
50 $ cd ..
52 $ cd ..
51
53
52 Clone
54 Clone
53
55
54 $ hg clone -q repo clone
56 $ hg clone -q repo clone
55
57
56 Add two linear commits
58 Add two linear commits
57
59
58 $ cd repo
60 $ cd repo
59 $ echo B > B
61 $ echo B > B
60 $ hg commit -A -m B -q
62 $ hg commit -A -m B -q
61 $ echo C > C
63 $ echo C > C
62 $ hg commit -A -m C -q
64 $ hg commit -A -m C -q
63
65
64 $ cd ../clone
66 $ cd ../clone
65 $ cat >> .hg/hgrc <<EOF
67 $ cat >> .hg/hgrc <<EOF
66 > [hooks]
68 > [hooks]
67 > pretxnchangegroup = sh -c "printenv.py pretxnchangegroup"
69 > pretxnchangegroup = sh -c "printenv.py pretxnchangegroup"
68 > changegroup = sh -c "printenv.py changegroup"
70 > changegroup = sh -c "printenv.py changegroup"
69 > incoming = sh -c "printenv.py incoming"
71 > incoming = sh -c "printenv.py incoming"
70 > EOF
72 > EOF
71
73
72 Pull the new commits in the clone
74 Pull the new commits in the clone
73
75
74 $ hg pull
76 $ hg pull
75 pulling from $TESTTMP/repo (glob)
77 pulling from $TESTTMP/repo (glob)
76 searching for changes
78 searching for changes
77 remote: changegroup1
79 remote: changegroup1
78 adding changesets
80 adding changesets
79 adding manifests
81 adding manifests
80 adding file changes
82 adding file changes
81 added 1 changesets with 1 changes to 1 files
83 added 1 changesets with 1 changes to 1 files
82 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
84 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
83 remote: changegroup2
85 remote: changegroup2
84 adding changesets
86 adding changesets
85 adding manifests
87 adding manifests
86 adding file changes
88 adding file changes
87 added 1 changesets with 1 changes to 1 files
89 added 1 changesets with 1 changes to 1 files
88 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
90 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
89 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
91 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
90 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
92 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
91 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
93 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
92 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
94 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
93 pullop.cgresult is 1
95 pullop.cgresult is 1
94 (run 'hg update' to get a working copy)
96 (run 'hg update' to get a working copy)
95 $ hg update
97 $ hg update
96 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
98 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
97 $ hg log -G
99 $ hg log -G
98 @ 2:f838bfaca5c7 public test C
100 @ 2:f838bfaca5c7 public test C
99 |
101 |
100 o 1:27547f69f254 public test B
102 o 1:27547f69f254 public test B
101 |
103 |
102 o 0:4a2df7238c3b public test A
104 o 0:4a2df7238c3b public test A
103
105
104 Add more changesets with multiple heads to the original repository
106 Add more changesets with multiple heads to the original repository
105
107
106 $ cd ../repo
108 $ cd ../repo
107 $ echo D > D
109 $ echo D > D
108 $ hg commit -A -m D -q
110 $ hg commit -A -m D -q
109 $ hg up -r 1
111 $ hg up -r 1
110 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
112 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
111 $ echo E > E
113 $ echo E > E
112 $ hg commit -A -m E -q
114 $ hg commit -A -m E -q
113 $ echo F > F
115 $ echo F > F
114 $ hg commit -A -m F -q
116 $ hg commit -A -m F -q
115 $ hg up -r 1
117 $ hg up -r 1
116 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
118 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
117 $ echo G > G
119 $ echo G > G
118 $ hg commit -A -m G -q
120 $ hg commit -A -m G -q
119 $ hg up -r 3
121 $ hg up -r 3
120 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
122 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
121 $ echo H > H
123 $ echo H > H
122 $ hg commit -A -m H -q
124 $ hg commit -A -m H -q
123 $ hg log -G
125 $ hg log -G
124 @ 7:5cd59d311f65 draft test H
126 @ 7:5cd59d311f65 draft test H
125 |
127 |
126 | o 6:1d14c3ce6ac0 draft test G
128 | o 6:1d14c3ce6ac0 draft test G
127 | |
129 | |
128 | | o 5:7f219660301f draft test F
130 | | o 5:7f219660301f draft test F
129 | | |
131 | | |
130 | | o 4:8a5212ebc852 draft test E
132 | | o 4:8a5212ebc852 draft test E
131 | |/
133 | |/
132 o | 3:b3325c91a4d9 draft test D
134 o | 3:b3325c91a4d9 draft test D
133 | |
135 | |
134 o | 2:f838bfaca5c7 draft test C
136 o | 2:f838bfaca5c7 draft test C
135 |/
137 |/
136 o 1:27547f69f254 draft test B
138 o 1:27547f69f254 draft test B
137 |
139 |
138 o 0:4a2df7238c3b draft test A
140 o 0:4a2df7238c3b draft test A
139
141
140 New heads are reported during transfer and properly accounted for in
142 New heads are reported during transfer and properly accounted for in
141 pullop.cgresult
143 pullop.cgresult
142
144
143 $ cd ../clone
145 $ cd ../clone
144 $ hg pull
146 $ hg pull
145 pulling from $TESTTMP/repo (glob)
147 pulling from $TESTTMP/repo (glob)
146 searching for changes
148 searching for changes
147 remote: changegroup1
149 remote: changegroup1
148 adding changesets
150 adding changesets
149 adding manifests
151 adding manifests
150 adding file changes
152 adding file changes
151 added 2 changesets with 2 changes to 2 files (+1 heads)
153 added 2 changesets with 2 changes to 2 files (+1 heads)
152 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
154 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
153 remote: changegroup2
155 remote: changegroup2
154 adding changesets
156 adding changesets
155 adding manifests
157 adding manifests
156 adding file changes
158 adding file changes
157 added 3 changesets with 3 changes to 3 files (+1 heads)
159 added 3 changesets with 3 changes to 3 files (+1 heads)
158 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
160 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
159 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
161 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
160 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
162 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
161 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
163 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
162 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
164 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
163 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
165 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
164 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
166 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
165 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
167 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
166 pullop.cgresult is 3
168 pullop.cgresult is 3
167 (run 'hg heads' to see heads, 'hg merge' to merge)
169 (run 'hg heads' to see heads, 'hg merge' to merge)
168 $ hg log -G
170 $ hg log -G
169 o 7:5cd59d311f65 public test H
171 o 7:5cd59d311f65 public test H
170 |
172 |
171 | o 6:1d14c3ce6ac0 public test G
173 | o 6:1d14c3ce6ac0 public test G
172 | |
174 | |
173 | | o 5:7f219660301f public test F
175 | | o 5:7f219660301f public test F
174 | | |
176 | | |
175 | | o 4:8a5212ebc852 public test E
177 | | o 4:8a5212ebc852 public test E
176 | |/
178 | |/
177 o | 3:b3325c91a4d9 public test D
179 o | 3:b3325c91a4d9 public test D
178 | |
180 | |
179 @ | 2:f838bfaca5c7 public test C
181 @ | 2:f838bfaca5c7 public test C
180 |/
182 |/
181 o 1:27547f69f254 public test B
183 o 1:27547f69f254 public test B
182 |
184 |
183 o 0:4a2df7238c3b public test A
185 o 0:4a2df7238c3b public test A
184
186
185 Removing a head from the original repository by merging it
187 Removing a head from the original repository by merging it
186
188
187 $ cd ../repo
189 $ cd ../repo
188 $ hg merge -r 6 -q
190 $ hg merge -r 6 -q
189 $ hg commit -m Merge
191 $ hg commit -m Merge
190 $ echo I > I
192 $ echo I > I
191 $ hg commit -A -m H -q
193 $ hg commit -A -m H -q
192 $ hg log -G
194 $ hg log -G
193 @ 9:9d18e5bd9ab0 draft test H
195 @ 9:9d18e5bd9ab0 draft test H
194 |
196 |
195 o 8:71bd7b46de72 draft test Merge
197 o 8:71bd7b46de72 draft test Merge
196 |\
198 |\
197 | o 7:5cd59d311f65 draft test H
199 | o 7:5cd59d311f65 draft test H
198 | |
200 | |
199 o | 6:1d14c3ce6ac0 draft test G
201 o | 6:1d14c3ce6ac0 draft test G
200 | |
202 | |
201 | | o 5:7f219660301f draft test F
203 | | o 5:7f219660301f draft test F
202 | | |
204 | | |
203 +---o 4:8a5212ebc852 draft test E
205 +---o 4:8a5212ebc852 draft test E
204 | |
206 | |
205 | o 3:b3325c91a4d9 draft test D
207 | o 3:b3325c91a4d9 draft test D
206 | |
208 | |
207 | o 2:f838bfaca5c7 draft test C
209 | o 2:f838bfaca5c7 draft test C
208 |/
210 |/
209 o 1:27547f69f254 draft test B
211 o 1:27547f69f254 draft test B
210 |
212 |
211 o 0:4a2df7238c3b draft test A
213 o 0:4a2df7238c3b draft test A
212
214
213 Removed heads are reported during transfer and properly accounted for in
215 Removed heads are reported during transfer and properly accounted for in
214 pullop.cgresult
216 pullop.cgresult
215
217
216 $ cd ../clone
218 $ cd ../clone
217 $ hg pull
219 $ hg pull
218 pulling from $TESTTMP/repo (glob)
220 pulling from $TESTTMP/repo (glob)
219 searching for changes
221 searching for changes
220 remote: changegroup1
222 remote: changegroup1
221 adding changesets
223 adding changesets
222 adding manifests
224 adding manifests
223 adding file changes
225 adding file changes
224 added 1 changesets with 0 changes to 0 files (-1 heads)
226 added 1 changesets with 0 changes to 0 files (-1 heads)
225 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
227 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
226 remote: changegroup2
228 remote: changegroup2
227 adding changesets
229 adding changesets
228 adding manifests
230 adding manifests
229 adding file changes
231 adding file changes
230 added 1 changesets with 1 changes to 1 files
232 added 1 changesets with 1 changes to 1 files
231 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
233 pretxnchangegroup hook: HG_HOOKNAME=pretxnchangegroup HG_HOOKTYPE=pretxnchangegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
232 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
234 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
233 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
235 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
234 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
236 changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
235 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
237 incoming hook: HG_HOOKNAME=incoming HG_HOOKTYPE=incoming HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=file:$TESTTMP/repo
236 pullop.cgresult is -2
238 pullop.cgresult is -2
237 (run 'hg update' to get a working copy)
239 (run 'hg update' to get a working copy)
238 $ hg log -G
240 $ hg log -G
239 o 9:9d18e5bd9ab0 public test H
241 o 9:9d18e5bd9ab0 public test H
240 |
242 |
241 o 8:71bd7b46de72 public test Merge
243 o 8:71bd7b46de72 public test Merge
242 |\
244 |\
243 | o 7:5cd59d311f65 public test H
245 | o 7:5cd59d311f65 public test H
244 | |
246 | |
245 o | 6:1d14c3ce6ac0 public test G
247 o | 6:1d14c3ce6ac0 public test G
246 | |
248 | |
247 | | o 5:7f219660301f public test F
249 | | o 5:7f219660301f public test F
248 | | |
250 | | |
249 +---o 4:8a5212ebc852 public test E
251 +---o 4:8a5212ebc852 public test E
250 | |
252 | |
251 | o 3:b3325c91a4d9 public test D
253 | o 3:b3325c91a4d9 public test D
252 | |
254 | |
253 | @ 2:f838bfaca5c7 public test C
255 | @ 2:f838bfaca5c7 public test C
254 |/
256 |/
255 o 1:27547f69f254 public test B
257 o 1:27547f69f254 public test B
256 |
258 |
257 o 0:4a2df7238c3b public test A
259 o 0:4a2df7238c3b public test A
258
260
General Comments 0
You need to be logged in to leave comments. Login now