changegroup: add an option to create bundle with full snapshot only...
Boris Feld
r40459:808b7626 default
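
This change teaches changegroup generation to honor a new 'full' value for the devel.bundle.delta config option: when it is set, deltagroup() asks the storage layer for full snapshots (repository.CG_DELTAMODE_FULL) instead of letting it pick delta bases. The sketch below is mine, not part of the commit; it only restates the selection logic visible in the hunk further down, assumes the mercurial package of this vintage is importable, and the helper name _pickdeltamode is hypothetical.

    from mercurial import repository

    def _pickdeltamode(configtarget, forcedeltaparentprev=False):
        # hypothetical helper mirroring the updated logic in deltagroup()
        # ''     -> standard delta selection
        # 'p1'   -> force deltas against the first parent
        # 'full' -> the new behaviour: emit full snapshots only
        deltamode = repository.CG_DELTAMODE_STD
        if forcedeltaparentprev:
            deltamode = repository.CG_DELTAMODE_PREV
        elif configtarget == 'p1':
            deltamode = repository.CG_DELTAMODE_P1
        elif configtarget == 'full':
            deltamode = repository.CG_DELTAMODE_FULL
        return deltamode

A bundle using the new mode would presumably be produced with something like hg bundle --config devel.bundle.delta=full (usage assumed here; it is not shown in this hunk).
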
@@ -1,1392 +1,1394 b''
1 # changegroup.py - Mercurial changegroup manipulation functions
1 # changegroup.py - Mercurial changegroup manipulation functions
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import struct
11 import struct
12 import weakref
12 import weakref
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 hex,
16 hex,
17 nullid,
17 nullid,
18 nullrev,
18 nullrev,
19 short,
19 short,
20 )
20 )
21
21
22 from . import (
22 from . import (
23 error,
23 error,
24 match as matchmod,
24 match as matchmod,
25 mdiff,
25 mdiff,
26 phases,
26 phases,
27 pycompat,
27 pycompat,
28 repository,
28 repository,
29 util,
29 util,
30 )
30 )
31
31
32 _CHANGEGROUPV1_DELTA_HEADER = struct.Struct("20s20s20s20s")
32 _CHANGEGROUPV1_DELTA_HEADER = struct.Struct("20s20s20s20s")
33 _CHANGEGROUPV2_DELTA_HEADER = struct.Struct("20s20s20s20s20s")
33 _CHANGEGROUPV2_DELTA_HEADER = struct.Struct("20s20s20s20s20s")
34 _CHANGEGROUPV3_DELTA_HEADER = struct.Struct(">20s20s20s20s20sH")
34 _CHANGEGROUPV3_DELTA_HEADER = struct.Struct(">20s20s20s20s20sH")
35
35
36 LFS_REQUIREMENT = 'lfs'
36 LFS_REQUIREMENT = 'lfs'
37
37
38 readexactly = util.readexactly
38 readexactly = util.readexactly
39
39
40 def getchunk(stream):
40 def getchunk(stream):
41 """return the next chunk from stream as a string"""
41 """return the next chunk from stream as a string"""
42 d = readexactly(stream, 4)
42 d = readexactly(stream, 4)
43 l = struct.unpack(">l", d)[0]
43 l = struct.unpack(">l", d)[0]
44 if l <= 4:
44 if l <= 4:
45 if l:
45 if l:
46 raise error.Abort(_("invalid chunk length %d") % l)
46 raise error.Abort(_("invalid chunk length %d") % l)
47 return ""
47 return ""
48 return readexactly(stream, l - 4)
48 return readexactly(stream, l - 4)
49
49
50 def chunkheader(length):
50 def chunkheader(length):
51 """return a changegroup chunk header (string)"""
51 """return a changegroup chunk header (string)"""
52 return struct.pack(">l", length + 4)
52 return struct.pack(">l", length + 4)
53
53
54 def closechunk():
54 def closechunk():
55 """return a changegroup chunk header (string) for a zero-length chunk"""
55 """return a changegroup chunk header (string) for a zero-length chunk"""
56 return struct.pack(">l", 0)
56 return struct.pack(">l", 0)
57
57
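
As a side note for readers of the framing helpers above, here is a tiny worked example (mine, not part of the commit) of the chunk format they implement; it assumes mercurial.changegroup is importable as-is.

    import io
    import struct
    from mercurial.changegroup import chunkheader, closechunk, getchunk

    payload = b'abc'
    framed = chunkheader(len(payload)) + payload
    # The 4-byte big-endian length counts the length field itself: 3 + 4 == 7.
    assert struct.unpack(">l", framed[:4])[0] == 7
    # getchunk() strips the header and hands back just the payload.
    assert getchunk(io.BytesIO(framed)) == b'abc'
    # A zero-length "close" chunk terminates a group.
    assert closechunk() == b'\x00\x00\x00\x00'
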
58 def _fileheader(path):
58 def _fileheader(path):
59 """Obtain a changegroup chunk header for a named path."""
59 """Obtain a changegroup chunk header for a named path."""
60 return chunkheader(len(path)) + path
60 return chunkheader(len(path)) + path
61
61
62 def writechunks(ui, chunks, filename, vfs=None):
62 def writechunks(ui, chunks, filename, vfs=None):
63 """Write chunks to a file and return its filename.
63 """Write chunks to a file and return its filename.
64
64
65 The stream is assumed to be a bundle file.
65 The stream is assumed to be a bundle file.
66 Existing files will not be overwritten.
66 Existing files will not be overwritten.
67 If no filename is specified, a temporary file is created.
67 If no filename is specified, a temporary file is created.
68 """
68 """
69 fh = None
69 fh = None
70 cleanup = None
70 cleanup = None
71 try:
71 try:
72 if filename:
72 if filename:
73 if vfs:
73 if vfs:
74 fh = vfs.open(filename, "wb")
74 fh = vfs.open(filename, "wb")
75 else:
75 else:
76 # Increase default buffer size because default is usually
76 # Increase default buffer size because default is usually
77 # small (4k is common on Linux).
77 # small (4k is common on Linux).
78 fh = open(filename, "wb", 131072)
78 fh = open(filename, "wb", 131072)
79 else:
79 else:
80 fd, filename = pycompat.mkstemp(prefix="hg-bundle-", suffix=".hg")
80 fd, filename = pycompat.mkstemp(prefix="hg-bundle-", suffix=".hg")
81 fh = os.fdopen(fd, r"wb")
81 fh = os.fdopen(fd, r"wb")
82 cleanup = filename
82 cleanup = filename
83 for c in chunks:
83 for c in chunks:
84 fh.write(c)
84 fh.write(c)
85 cleanup = None
85 cleanup = None
86 return filename
86 return filename
87 finally:
87 finally:
88 if fh is not None:
88 if fh is not None:
89 fh.close()
89 fh.close()
90 if cleanup is not None:
90 if cleanup is not None:
91 if filename and vfs:
91 if filename and vfs:
92 vfs.unlink(cleanup)
92 vfs.unlink(cleanup)
93 else:
93 else:
94 os.unlink(cleanup)
94 os.unlink(cleanup)
95
95
96 class cg1unpacker(object):
96 class cg1unpacker(object):
97 """Unpacker for cg1 changegroup streams.
97 """Unpacker for cg1 changegroup streams.
98
98
99 A changegroup unpacker handles the framing of the revision data in
99 A changegroup unpacker handles the framing of the revision data in
100 the wire format. Most consumers will want to use the apply()
100 the wire format. Most consumers will want to use the apply()
101 method to add the changes from the changegroup to a repository.
101 method to add the changes from the changegroup to a repository.
102
102
103 If you're forwarding a changegroup unmodified to another consumer,
103 If you're forwarding a changegroup unmodified to another consumer,
104 use getchunks(), which returns an iterator of changegroup
104 use getchunks(), which returns an iterator of changegroup
105 chunks. This is mostly useful for cases where you need to know the
105 chunks. This is mostly useful for cases where you need to know the
106 data stream has ended by observing the end of the changegroup.
106 data stream has ended by observing the end of the changegroup.
107
107
108 deltachunk() is useful only if you're applying delta data. Most
108 deltachunk() is useful only if you're applying delta data. Most
109 consumers should prefer apply() instead.
109 consumers should prefer apply() instead.
110
110
111 A few other public methods exist. Those are used only for
111 A few other public methods exist. Those are used only for
112 bundlerepo and some debug commands - their use is discouraged.
112 bundlerepo and some debug commands - their use is discouraged.
113 """
113 """
114 deltaheader = _CHANGEGROUPV1_DELTA_HEADER
114 deltaheader = _CHANGEGROUPV1_DELTA_HEADER
115 deltaheadersize = deltaheader.size
115 deltaheadersize = deltaheader.size
116 version = '01'
116 version = '01'
117 _grouplistcount = 1 # One list of files after the manifests
117 _grouplistcount = 1 # One list of files after the manifests
118
118
119 def __init__(self, fh, alg, extras=None):
119 def __init__(self, fh, alg, extras=None):
120 if alg is None:
120 if alg is None:
121 alg = 'UN'
121 alg = 'UN'
122 if alg not in util.compengines.supportedbundletypes:
122 if alg not in util.compengines.supportedbundletypes:
123 raise error.Abort(_('unknown stream compression type: %s')
123 raise error.Abort(_('unknown stream compression type: %s')
124 % alg)
124 % alg)
125 if alg == 'BZ':
125 if alg == 'BZ':
126 alg = '_truncatedBZ'
126 alg = '_truncatedBZ'
127
127
128 compengine = util.compengines.forbundletype(alg)
128 compengine = util.compengines.forbundletype(alg)
129 self._stream = compengine.decompressorreader(fh)
129 self._stream = compengine.decompressorreader(fh)
130 self._type = alg
130 self._type = alg
131 self.extras = extras or {}
131 self.extras = extras or {}
132 self.callback = None
132 self.callback = None
133
133
134 # These methods (compressed, read, seek, tell) all appear to only
134 # These methods (compressed, read, seek, tell) all appear to only
135 # be used by bundlerepo, but it's a little hard to tell.
135 # be used by bundlerepo, but it's a little hard to tell.
136 def compressed(self):
136 def compressed(self):
137 return self._type is not None and self._type != 'UN'
137 return self._type is not None and self._type != 'UN'
138 def read(self, l):
138 def read(self, l):
139 return self._stream.read(l)
139 return self._stream.read(l)
140 def seek(self, pos):
140 def seek(self, pos):
141 return self._stream.seek(pos)
141 return self._stream.seek(pos)
142 def tell(self):
142 def tell(self):
143 return self._stream.tell()
143 return self._stream.tell()
144 def close(self):
144 def close(self):
145 return self._stream.close()
145 return self._stream.close()
146
146
147 def _chunklength(self):
147 def _chunklength(self):
148 d = readexactly(self._stream, 4)
148 d = readexactly(self._stream, 4)
149 l = struct.unpack(">l", d)[0]
149 l = struct.unpack(">l", d)[0]
150 if l <= 4:
150 if l <= 4:
151 if l:
151 if l:
152 raise error.Abort(_("invalid chunk length %d") % l)
152 raise error.Abort(_("invalid chunk length %d") % l)
153 return 0
153 return 0
154 if self.callback:
154 if self.callback:
155 self.callback()
155 self.callback()
156 return l - 4
156 return l - 4
157
157
158 def changelogheader(self):
158 def changelogheader(self):
159 """v10 does not have a changelog header chunk"""
159 """v10 does not have a changelog header chunk"""
160 return {}
160 return {}
161
161
162 def manifestheader(self):
162 def manifestheader(self):
163 """v10 does not have a manifest header chunk"""
163 """v10 does not have a manifest header chunk"""
164 return {}
164 return {}
165
165
166 def filelogheader(self):
166 def filelogheader(self):
167 """return the header of the filelogs chunk, v10 only has the filename"""
167 """return the header of the filelogs chunk, v10 only has the filename"""
168 l = self._chunklength()
168 l = self._chunklength()
169 if not l:
169 if not l:
170 return {}
170 return {}
171 fname = readexactly(self._stream, l)
171 fname = readexactly(self._stream, l)
172 return {'filename': fname}
172 return {'filename': fname}
173
173
174 def _deltaheader(self, headertuple, prevnode):
174 def _deltaheader(self, headertuple, prevnode):
175 node, p1, p2, cs = headertuple
175 node, p1, p2, cs = headertuple
176 if prevnode is None:
176 if prevnode is None:
177 deltabase = p1
177 deltabase = p1
178 else:
178 else:
179 deltabase = prevnode
179 deltabase = prevnode
180 flags = 0
180 flags = 0
181 return node, p1, p2, deltabase, cs, flags
181 return node, p1, p2, deltabase, cs, flags
182
182
183 def deltachunk(self, prevnode):
183 def deltachunk(self, prevnode):
184 l = self._chunklength()
184 l = self._chunklength()
185 if not l:
185 if not l:
186 return {}
186 return {}
187 headerdata = readexactly(self._stream, self.deltaheadersize)
187 headerdata = readexactly(self._stream, self.deltaheadersize)
188 header = self.deltaheader.unpack(headerdata)
188 header = self.deltaheader.unpack(headerdata)
189 delta = readexactly(self._stream, l - self.deltaheadersize)
189 delta = readexactly(self._stream, l - self.deltaheadersize)
190 node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
190 node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode)
191 return (node, p1, p2, cs, deltabase, delta, flags)
191 return (node, p1, p2, cs, deltabase, delta, flags)
192
192
193 def getchunks(self):
193 def getchunks(self):
194 """returns all the chunks contains in the bundle
194 """returns all the chunks contains in the bundle
195
195
196 Used when you need to forward the binary stream to a file or another
196 Used when you need to forward the binary stream to a file or another
197 network API. To do so, it parse the changegroup data, otherwise it will
197 network API. To do so, it parse the changegroup data, otherwise it will
198 block in case of sshrepo because it don't know the end of the stream.
198 block in case of sshrepo because it don't know the end of the stream.
199 """
199 """
200 # For changegroup 1 and 2, we expect 3 parts: changelog, manifestlog,
200 # For changegroup 1 and 2, we expect 3 parts: changelog, manifestlog,
201 # and a list of filelogs. For changegroup 3, we expect 4 parts:
201 # and a list of filelogs. For changegroup 3, we expect 4 parts:
202 # changelog, manifestlog, a list of tree manifestlogs, and a list of
202 # changelog, manifestlog, a list of tree manifestlogs, and a list of
203 # filelogs.
203 # filelogs.
204 #
204 #
205 # Changelog and manifestlog parts are terminated with empty chunks. The
205 # Changelog and manifestlog parts are terminated with empty chunks. The
206 # tree and file parts are a list of entry sections. Each entry section
206 # tree and file parts are a list of entry sections. Each entry section
207 # is a series of chunks terminating in an empty chunk. The list of these
207 # is a series of chunks terminating in an empty chunk. The list of these
208 # entry sections is terminated in yet another empty chunk, so we know
208 # entry sections is terminated in yet another empty chunk, so we know
209 # we've reached the end of the tree/file list when we reach an empty
209 # we've reached the end of the tree/file list when we reach an empty
210 # chunk that was proceeded by no non-empty chunks.
210 # chunk that was proceeded by no non-empty chunks.
211
211
212 parts = 0
212 parts = 0
213 while parts < 2 + self._grouplistcount:
213 while parts < 2 + self._grouplistcount:
214 noentries = True
214 noentries = True
215 while True:
215 while True:
216 chunk = getchunk(self)
216 chunk = getchunk(self)
217 if not chunk:
217 if not chunk:
218 # The first two empty chunks represent the end of the
218 # The first two empty chunks represent the end of the
219 # changelog and the manifestlog portions. The remaining
219 # changelog and the manifestlog portions. The remaining
220 # empty chunks represent either A) the end of individual
220 # empty chunks represent either A) the end of individual
221 # tree or file entries in the file list, or B) the end of
221 # tree or file entries in the file list, or B) the end of
222 # the entire list. It's the end of the entire list if there
222 # the entire list. It's the end of the entire list if there
223 # were no entries (i.e. noentries is True).
223 # were no entries (i.e. noentries is True).
224 if parts < 2:
224 if parts < 2:
225 parts += 1
225 parts += 1
226 elif noentries:
226 elif noentries:
227 parts += 1
227 parts += 1
228 break
228 break
229 noentries = False
229 noentries = False
230 yield chunkheader(len(chunk))
230 yield chunkheader(len(chunk))
231 pos = 0
231 pos = 0
232 while pos < len(chunk):
232 while pos < len(chunk):
233 next = pos + 2**20
233 next = pos + 2**20
234 yield chunk[pos:next]
234 yield chunk[pos:next]
235 pos = next
235 pos = next
236 yield closechunk()
236 yield closechunk()
237
237
238 def _unpackmanifests(self, repo, revmap, trp, prog):
238 def _unpackmanifests(self, repo, revmap, trp, prog):
239 self.callback = prog.increment
239 self.callback = prog.increment
240 # no need to check for empty manifest group here:
240 # no need to check for empty manifest group here:
241 # if the result of the merge of 1 and 2 is the same in 3 and 4,
241 # if the result of the merge of 1 and 2 is the same in 3 and 4,
242 # no new manifest will be created and the manifest group will
242 # no new manifest will be created and the manifest group will
243 # be empty during the pull
243 # be empty during the pull
244 self.manifestheader()
244 self.manifestheader()
245 deltas = self.deltaiter()
245 deltas = self.deltaiter()
246 repo.manifestlog.getstorage(b'').addgroup(deltas, revmap, trp)
246 repo.manifestlog.getstorage(b'').addgroup(deltas, revmap, trp)
247 prog.complete()
247 prog.complete()
248 self.callback = None
248 self.callback = None
249
249
250 def apply(self, repo, tr, srctype, url, targetphase=phases.draft,
250 def apply(self, repo, tr, srctype, url, targetphase=phases.draft,
251 expectedtotal=None):
251 expectedtotal=None):
252 """Add the changegroup returned by source.read() to this repo.
252 """Add the changegroup returned by source.read() to this repo.
253 srctype is a string like 'push', 'pull', or 'unbundle'. url is
253 srctype is a string like 'push', 'pull', or 'unbundle'. url is
254 the URL of the repo where this changegroup is coming from.
254 the URL of the repo where this changegroup is coming from.
255
255
256 Return an integer summarizing the change to this repo:
256 Return an integer summarizing the change to this repo:
257 - nothing changed or no source: 0
257 - nothing changed or no source: 0
258 - more heads than before: 1+added heads (2..n)
258 - more heads than before: 1+added heads (2..n)
259 - fewer heads than before: -1-removed heads (-2..-n)
259 - fewer heads than before: -1-removed heads (-2..-n)
260 - number of heads stays the same: 1
260 - number of heads stays the same: 1
261 """
261 """
262 repo = repo.unfiltered()
262 repo = repo.unfiltered()
263 def csmap(x):
263 def csmap(x):
264 repo.ui.debug("add changeset %s\n" % short(x))
264 repo.ui.debug("add changeset %s\n" % short(x))
265 return len(cl)
265 return len(cl)
266
266
267 def revmap(x):
267 def revmap(x):
268 return cl.rev(x)
268 return cl.rev(x)
269
269
270 changesets = files = revisions = 0
270 changesets = files = revisions = 0
271
271
272 try:
272 try:
273 # The transaction may already carry source information. In this
273 # The transaction may already carry source information. In this
274 # case we use the top level data. We overwrite the argument
274 # case we use the top level data. We overwrite the argument
275 # because we need to use the top level value (if they exist)
275 # because we need to use the top level value (if they exist)
276 # in this function.
276 # in this function.
277 srctype = tr.hookargs.setdefault('source', srctype)
277 srctype = tr.hookargs.setdefault('source', srctype)
278 url = tr.hookargs.setdefault('url', url)
278 url = tr.hookargs.setdefault('url', url)
279 repo.hook('prechangegroup',
279 repo.hook('prechangegroup',
280 throw=True, **pycompat.strkwargs(tr.hookargs))
280 throw=True, **pycompat.strkwargs(tr.hookargs))
281
281
282 # write changelog data to temp files so concurrent readers
282 # write changelog data to temp files so concurrent readers
283 # will not see an inconsistent view
283 # will not see an inconsistent view
284 cl = repo.changelog
284 cl = repo.changelog
285 cl.delayupdate(tr)
285 cl.delayupdate(tr)
286 oldheads = set(cl.heads())
286 oldheads = set(cl.heads())
287
287
288 trp = weakref.proxy(tr)
288 trp = weakref.proxy(tr)
289 # pull off the changeset group
289 # pull off the changeset group
290 repo.ui.status(_("adding changesets\n"))
290 repo.ui.status(_("adding changesets\n"))
291 clstart = len(cl)
291 clstart = len(cl)
292 progress = repo.ui.makeprogress(_('changesets'), unit=_('chunks'),
292 progress = repo.ui.makeprogress(_('changesets'), unit=_('chunks'),
293 total=expectedtotal)
293 total=expectedtotal)
294 self.callback = progress.increment
294 self.callback = progress.increment
295
295
296 efiles = set()
296 efiles = set()
297 def onchangelog(cl, node):
297 def onchangelog(cl, node):
298 efiles.update(cl.readfiles(node))
298 efiles.update(cl.readfiles(node))
299
299
300 self.changelogheader()
300 self.changelogheader()
301 deltas = self.deltaiter()
301 deltas = self.deltaiter()
302 cgnodes = cl.addgroup(deltas, csmap, trp, addrevisioncb=onchangelog)
302 cgnodes = cl.addgroup(deltas, csmap, trp, addrevisioncb=onchangelog)
303 efiles = len(efiles)
303 efiles = len(efiles)
304
304
305 if not cgnodes:
305 if not cgnodes:
306 repo.ui.develwarn('applied empty changelog from changegroup',
306 repo.ui.develwarn('applied empty changelog from changegroup',
307 config='warn-empty-changegroup')
307 config='warn-empty-changegroup')
308 clend = len(cl)
308 clend = len(cl)
309 changesets = clend - clstart
309 changesets = clend - clstart
310 progress.complete()
310 progress.complete()
311 self.callback = None
311 self.callback = None
312
312
313 # pull off the manifest group
313 # pull off the manifest group
314 repo.ui.status(_("adding manifests\n"))
314 repo.ui.status(_("adding manifests\n"))
315 # We know that we'll never have more manifests than we had
315 # We know that we'll never have more manifests than we had
316 # changesets.
316 # changesets.
317 progress = repo.ui.makeprogress(_('manifests'), unit=_('chunks'),
317 progress = repo.ui.makeprogress(_('manifests'), unit=_('chunks'),
318 total=changesets)
318 total=changesets)
319 self._unpackmanifests(repo, revmap, trp, progress)
319 self._unpackmanifests(repo, revmap, trp, progress)
320
320
321 needfiles = {}
321 needfiles = {}
322 if repo.ui.configbool('server', 'validate'):
322 if repo.ui.configbool('server', 'validate'):
323 cl = repo.changelog
323 cl = repo.changelog
324 ml = repo.manifestlog
324 ml = repo.manifestlog
325 # validate incoming csets have their manifests
325 # validate incoming csets have their manifests
326 for cset in pycompat.xrange(clstart, clend):
326 for cset in pycompat.xrange(clstart, clend):
327 mfnode = cl.changelogrevision(cset).manifest
327 mfnode = cl.changelogrevision(cset).manifest
328 mfest = ml[mfnode].readdelta()
328 mfest = ml[mfnode].readdelta()
329 # store file cgnodes we must see
329 # store file cgnodes we must see
330 for f, n in mfest.iteritems():
330 for f, n in mfest.iteritems():
331 needfiles.setdefault(f, set()).add(n)
331 needfiles.setdefault(f, set()).add(n)
332
332
333 # process the files
333 # process the files
334 repo.ui.status(_("adding file changes\n"))
334 repo.ui.status(_("adding file changes\n"))
335 newrevs, newfiles = _addchangegroupfiles(
335 newrevs, newfiles = _addchangegroupfiles(
336 repo, self, revmap, trp, efiles, needfiles)
336 repo, self, revmap, trp, efiles, needfiles)
337 revisions += newrevs
337 revisions += newrevs
338 files += newfiles
338 files += newfiles
339
339
340 deltaheads = 0
340 deltaheads = 0
341 if oldheads:
341 if oldheads:
342 heads = cl.heads()
342 heads = cl.heads()
343 deltaheads = len(heads) - len(oldheads)
343 deltaheads = len(heads) - len(oldheads)
344 for h in heads:
344 for h in heads:
345 if h not in oldheads and repo[h].closesbranch():
345 if h not in oldheads and repo[h].closesbranch():
346 deltaheads -= 1
346 deltaheads -= 1
347 htext = ""
347 htext = ""
348 if deltaheads:
348 if deltaheads:
349 htext = _(" (%+d heads)") % deltaheads
349 htext = _(" (%+d heads)") % deltaheads
350
350
351 repo.ui.status(_("added %d changesets"
351 repo.ui.status(_("added %d changesets"
352 " with %d changes to %d files%s\n")
352 " with %d changes to %d files%s\n")
353 % (changesets, revisions, files, htext))
353 % (changesets, revisions, files, htext))
354 repo.invalidatevolatilesets()
354 repo.invalidatevolatilesets()
355
355
356 if changesets > 0:
356 if changesets > 0:
357 if 'node' not in tr.hookargs:
357 if 'node' not in tr.hookargs:
358 tr.hookargs['node'] = hex(cl.node(clstart))
358 tr.hookargs['node'] = hex(cl.node(clstart))
359 tr.hookargs['node_last'] = hex(cl.node(clend - 1))
359 tr.hookargs['node_last'] = hex(cl.node(clend - 1))
360 hookargs = dict(tr.hookargs)
360 hookargs = dict(tr.hookargs)
361 else:
361 else:
362 hookargs = dict(tr.hookargs)
362 hookargs = dict(tr.hookargs)
363 hookargs['node'] = hex(cl.node(clstart))
363 hookargs['node'] = hex(cl.node(clstart))
364 hookargs['node_last'] = hex(cl.node(clend - 1))
364 hookargs['node_last'] = hex(cl.node(clend - 1))
365 repo.hook('pretxnchangegroup',
365 repo.hook('pretxnchangegroup',
366 throw=True, **pycompat.strkwargs(hookargs))
366 throw=True, **pycompat.strkwargs(hookargs))
367
367
368 added = [cl.node(r) for r in pycompat.xrange(clstart, clend)]
368 added = [cl.node(r) for r in pycompat.xrange(clstart, clend)]
369 phaseall = None
369 phaseall = None
370 if srctype in ('push', 'serve'):
370 if srctype in ('push', 'serve'):
371 # Old servers can not push the boundary themselves.
371 # Old servers can not push the boundary themselves.
372 # New servers won't push the boundary if changeset already
372 # New servers won't push the boundary if changeset already
373 # exists locally as secret
373 # exists locally as secret
374 #
374 #
375 # We should not use added here but the list of all change in
375 # We should not use added here but the list of all change in
376 # the bundle
376 # the bundle
377 if repo.publishing():
377 if repo.publishing():
378 targetphase = phaseall = phases.public
378 targetphase = phaseall = phases.public
379 else:
379 else:
380 # closer target phase computation
380 # closer target phase computation
381
381
382 # Those changesets have been pushed from the
382 # Those changesets have been pushed from the
383 # outside, their phases are going to be pushed
383 # outside, their phases are going to be pushed
384 # alongside. Therefor `targetphase` is
384 # alongside. Therefor `targetphase` is
385 # ignored.
385 # ignored.
386 targetphase = phaseall = phases.draft
386 targetphase = phaseall = phases.draft
387 if added:
387 if added:
388 phases.registernew(repo, tr, targetphase, added)
388 phases.registernew(repo, tr, targetphase, added)
389 if phaseall is not None:
389 if phaseall is not None:
390 phases.advanceboundary(repo, tr, phaseall, cgnodes)
390 phases.advanceboundary(repo, tr, phaseall, cgnodes)
391
391
392 if changesets > 0:
392 if changesets > 0:
393
393
394 def runhooks():
394 def runhooks():
395 # These hooks run when the lock releases, not when the
395 # These hooks run when the lock releases, not when the
396 # transaction closes. So it's possible for the changelog
396 # transaction closes. So it's possible for the changelog
397 # to have changed since we last saw it.
397 # to have changed since we last saw it.
398 if clstart >= len(repo):
398 if clstart >= len(repo):
399 return
399 return
400
400
401 repo.hook("changegroup", **pycompat.strkwargs(hookargs))
401 repo.hook("changegroup", **pycompat.strkwargs(hookargs))
402
402
403 for n in added:
403 for n in added:
404 args = hookargs.copy()
404 args = hookargs.copy()
405 args['node'] = hex(n)
405 args['node'] = hex(n)
406 del args['node_last']
406 del args['node_last']
407 repo.hook("incoming", **pycompat.strkwargs(args))
407 repo.hook("incoming", **pycompat.strkwargs(args))
408
408
409 newheads = [h for h in repo.heads()
409 newheads = [h for h in repo.heads()
410 if h not in oldheads]
410 if h not in oldheads]
411 repo.ui.log("incoming",
411 repo.ui.log("incoming",
412 "%d incoming changes - new heads: %s\n",
412 "%d incoming changes - new heads: %s\n",
413 len(added),
413 len(added),
414 ', '.join([hex(c[:6]) for c in newheads]))
414 ', '.join([hex(c[:6]) for c in newheads]))
415
415
416 tr.addpostclose('changegroup-runhooks-%020i' % clstart,
416 tr.addpostclose('changegroup-runhooks-%020i' % clstart,
417 lambda tr: repo._afterlock(runhooks))
417 lambda tr: repo._afterlock(runhooks))
418 finally:
418 finally:
419 repo.ui.flush()
419 repo.ui.flush()
420 # never return 0 here:
420 # never return 0 here:
421 if deltaheads < 0:
421 if deltaheads < 0:
422 ret = deltaheads - 1
422 ret = deltaheads - 1
423 else:
423 else:
424 ret = deltaheads + 1
424 ret = deltaheads + 1
425 return ret
425 return ret
426
426
427 def deltaiter(self):
427 def deltaiter(self):
428 """
428 """
429 returns an iterator of the deltas in this changegroup
429 returns an iterator of the deltas in this changegroup
430
430
431 Useful for passing to the underlying storage system to be stored.
431 Useful for passing to the underlying storage system to be stored.
432 """
432 """
433 chain = None
433 chain = None
434 for chunkdata in iter(lambda: self.deltachunk(chain), {}):
434 for chunkdata in iter(lambda: self.deltachunk(chain), {}):
435 # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags)
435 # Chunkdata: (node, p1, p2, cs, deltabase, delta, flags)
436 yield chunkdata
436 yield chunkdata
437 chain = chunkdata[0]
437 chain = chunkdata[0]
438
438
439 class cg2unpacker(cg1unpacker):
439 class cg2unpacker(cg1unpacker):
440 """Unpacker for cg2 streams.
440 """Unpacker for cg2 streams.
441
441
442 cg2 streams add support for generaldelta, so the delta header
442 cg2 streams add support for generaldelta, so the delta header
443 format is slightly different. All other features about the data
443 format is slightly different. All other features about the data
444 remain the same.
444 remain the same.
445 """
445 """
446 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
446 deltaheader = _CHANGEGROUPV2_DELTA_HEADER
447 deltaheadersize = deltaheader.size
447 deltaheadersize = deltaheader.size
448 version = '02'
448 version = '02'
449
449
450 def _deltaheader(self, headertuple, prevnode):
450 def _deltaheader(self, headertuple, prevnode):
451 node, p1, p2, deltabase, cs = headertuple
451 node, p1, p2, deltabase, cs = headertuple
452 flags = 0
452 flags = 0
453 return node, p1, p2, deltabase, cs, flags
453 return node, p1, p2, deltabase, cs, flags
454
454
455 class cg3unpacker(cg2unpacker):
455 class cg3unpacker(cg2unpacker):
456 """Unpacker for cg3 streams.
456 """Unpacker for cg3 streams.
457
457
458 cg3 streams add support for exchanging treemanifests and revlog
458 cg3 streams add support for exchanging treemanifests and revlog
459 flags. It adds the revlog flags to the delta header and an empty chunk
459 flags. It adds the revlog flags to the delta header and an empty chunk
460 separating manifests and files.
460 separating manifests and files.
461 """
461 """
462 deltaheader = _CHANGEGROUPV3_DELTA_HEADER
462 deltaheader = _CHANGEGROUPV3_DELTA_HEADER
463 deltaheadersize = deltaheader.size
463 deltaheadersize = deltaheader.size
464 version = '03'
464 version = '03'
465 _grouplistcount = 2 # One list of manifests and one list of files
465 _grouplistcount = 2 # One list of manifests and one list of files
466
466
467 def _deltaheader(self, headertuple, prevnode):
467 def _deltaheader(self, headertuple, prevnode):
468 node, p1, p2, deltabase, cs, flags = headertuple
468 node, p1, p2, deltabase, cs, flags = headertuple
469 return node, p1, p2, deltabase, cs, flags
469 return node, p1, p2, deltabase, cs, flags
470
470
471 def _unpackmanifests(self, repo, revmap, trp, prog):
471 def _unpackmanifests(self, repo, revmap, trp, prog):
472 super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog)
472 super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog)
473 for chunkdata in iter(self.filelogheader, {}):
473 for chunkdata in iter(self.filelogheader, {}):
474 # If we get here, there are directory manifests in the changegroup
474 # If we get here, there are directory manifests in the changegroup
475 d = chunkdata["filename"]
475 d = chunkdata["filename"]
476 repo.ui.debug("adding %s revisions\n" % d)
476 repo.ui.debug("adding %s revisions\n" % d)
477 deltas = self.deltaiter()
477 deltas = self.deltaiter()
478 if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp):
478 if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp):
479 raise error.Abort(_("received dir revlog group is empty"))
479 raise error.Abort(_("received dir revlog group is empty"))
480
480
481 class headerlessfixup(object):
481 class headerlessfixup(object):
482 def __init__(self, fh, h):
482 def __init__(self, fh, h):
483 self._h = h
483 self._h = h
484 self._fh = fh
484 self._fh = fh
485 def read(self, n):
485 def read(self, n):
486 if self._h:
486 if self._h:
487 d, self._h = self._h[:n], self._h[n:]
487 d, self._h = self._h[:n], self._h[n:]
488 if len(d) < n:
488 if len(d) < n:
489 d += readexactly(self._fh, n - len(d))
489 d += readexactly(self._fh, n - len(d))
490 return d
490 return d
491 return readexactly(self._fh, n)
491 return readexactly(self._fh, n)
492
492
493 def _revisiondeltatochunks(delta, headerfn):
493 def _revisiondeltatochunks(delta, headerfn):
494 """Serialize a revisiondelta to changegroup chunks."""
494 """Serialize a revisiondelta to changegroup chunks."""
495
495
496 # The captured revision delta may be encoded as a delta against
496 # The captured revision delta may be encoded as a delta against
497 # a base revision or as a full revision. The changegroup format
497 # a base revision or as a full revision. The changegroup format
498 # requires that everything on the wire be deltas. So for full
498 # requires that everything on the wire be deltas. So for full
499 # revisions, we need to invent a header that says to rewrite
499 # revisions, we need to invent a header that says to rewrite
500 # data.
500 # data.
501
501
502 if delta.delta is not None:
502 if delta.delta is not None:
503 prefix, data = b'', delta.delta
503 prefix, data = b'', delta.delta
504 elif delta.basenode == nullid:
504 elif delta.basenode == nullid:
505 data = delta.revision
505 data = delta.revision
506 prefix = mdiff.trivialdiffheader(len(data))
506 prefix = mdiff.trivialdiffheader(len(data))
507 else:
507 else:
508 data = delta.revision
508 data = delta.revision
509 prefix = mdiff.replacediffheader(delta.baserevisionsize,
509 prefix = mdiff.replacediffheader(delta.baserevisionsize,
510 len(data))
510 len(data))
511
511
512 meta = headerfn(delta)
512 meta = headerfn(delta)
513
513
514 yield chunkheader(len(meta) + len(prefix) + len(data))
514 yield chunkheader(len(meta) + len(prefix) + len(data))
515 yield meta
515 yield meta
516 if prefix:
516 if prefix:
517 yield prefix
517 yield prefix
518 yield data
518 yield data
519
519
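
Worth noting in connection with the new full-snapshot mode: even a full revision still travels as a "delta" on the wire, because _revisiondeltatochunks() above fabricates a patch header that replaces the whole base text. A rough illustration (mine, not part of the commit) of the two prefixes it can emit, using the mdiff helpers it already calls:

    from mercurial import mdiff

    fulltext = b'line 1\nline 2\n'
    # Base is the null revision: a trivial header meaning "insert
    # len(fulltext) bytes over nothing".
    prefix_null = mdiff.trivialdiffheader(len(fulltext))
    # Base is a real revision of, say, 20 bytes: a header meaning "replace
    # bytes 0..20 of the base with the following len(fulltext) bytes".
    prefix_replace = mdiff.replacediffheader(20, len(fulltext))
    # Either prefix, followed by fulltext, is exactly the payload the
    # function yields after the chunk header and the per-revision meta header.
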
520 def _sortnodesellipsis(store, nodes, cl, lookup):
520 def _sortnodesellipsis(store, nodes, cl, lookup):
521 """Sort nodes for changegroup generation."""
521 """Sort nodes for changegroup generation."""
522 # Ellipses serving mode.
522 # Ellipses serving mode.
523 #
523 #
524 # In a perfect world, we'd generate better ellipsis-ified graphs
524 # In a perfect world, we'd generate better ellipsis-ified graphs
525 # for non-changelog revlogs. In practice, we haven't started doing
525 # for non-changelog revlogs. In practice, we haven't started doing
526 # that yet, so the resulting DAGs for the manifestlog and filelogs
526 # that yet, so the resulting DAGs for the manifestlog and filelogs
527 # are actually full of bogus parentage on all the ellipsis
527 # are actually full of bogus parentage on all the ellipsis
528 # nodes. This has the side effect that, while the contents are
528 # nodes. This has the side effect that, while the contents are
529 # correct, the individual DAGs might be completely out of whack in
529 # correct, the individual DAGs might be completely out of whack in
530 # a case like 882681bc3166 and its ancestors (back about 10
530 # a case like 882681bc3166 and its ancestors (back about 10
531 # revisions or so) in the main hg repo.
531 # revisions or so) in the main hg repo.
532 #
532 #
533 # The one invariant we *know* holds is that the new (potentially
533 # The one invariant we *know* holds is that the new (potentially
534 # bogus) DAG shape will be valid if we order the nodes in the
534 # bogus) DAG shape will be valid if we order the nodes in the
535 # order that they're introduced in dramatis personae by the
535 # order that they're introduced in dramatis personae by the
536 # changelog, so what we do is we sort the non-changelog histories
536 # changelog, so what we do is we sort the non-changelog histories
537 # by the order in which they are used by the changelog.
537 # by the order in which they are used by the changelog.
538 key = lambda n: cl.rev(lookup(n))
538 key = lambda n: cl.rev(lookup(n))
539 return sorted(nodes, key=key)
539 return sorted(nodes, key=key)
540
540
541 def _resolvenarrowrevisioninfo(cl, store, ischangelog, rev, linkrev,
541 def _resolvenarrowrevisioninfo(cl, store, ischangelog, rev, linkrev,
542 linknode, clrevtolocalrev, fullclnodes,
542 linknode, clrevtolocalrev, fullclnodes,
543 precomputedellipsis):
543 precomputedellipsis):
544 linkparents = precomputedellipsis[linkrev]
544 linkparents = precomputedellipsis[linkrev]
545 def local(clrev):
545 def local(clrev):
546 """Turn a changelog revnum into a local revnum.
546 """Turn a changelog revnum into a local revnum.
547
547
548 The ellipsis dag is stored as revnums on the changelog,
548 The ellipsis dag is stored as revnums on the changelog,
549 but when we're producing ellipsis entries for
549 but when we're producing ellipsis entries for
550 non-changelog revlogs, we need to turn those numbers into
550 non-changelog revlogs, we need to turn those numbers into
551 something local. This does that for us, and during the
551 something local. This does that for us, and during the
552 changelog sending phase will also expand the stored
552 changelog sending phase will also expand the stored
553 mappings as needed.
553 mappings as needed.
554 """
554 """
555 if clrev == nullrev:
555 if clrev == nullrev:
556 return nullrev
556 return nullrev
557
557
558 if ischangelog:
558 if ischangelog:
559 return clrev
559 return clrev
560
560
561 # Walk the ellipsis-ized changelog breadth-first looking for a
561 # Walk the ellipsis-ized changelog breadth-first looking for a
562 # change that has been linked from the current revlog.
562 # change that has been linked from the current revlog.
563 #
563 #
564 # For a flat manifest revlog only a single step should be necessary
564 # For a flat manifest revlog only a single step should be necessary
565 # as all relevant changelog entries are relevant to the flat
565 # as all relevant changelog entries are relevant to the flat
566 # manifest.
566 # manifest.
567 #
567 #
568 # For a filelog or tree manifest dirlog however not every changelog
568 # For a filelog or tree manifest dirlog however not every changelog
569 # entry will have been relevant, so we need to skip some changelog
569 # entry will have been relevant, so we need to skip some changelog
570 # nodes even after ellipsis-izing.
570 # nodes even after ellipsis-izing.
571 walk = [clrev]
571 walk = [clrev]
572 while walk:
572 while walk:
573 p = walk[0]
573 p = walk[0]
574 walk = walk[1:]
574 walk = walk[1:]
575 if p in clrevtolocalrev:
575 if p in clrevtolocalrev:
576 return clrevtolocalrev[p]
576 return clrevtolocalrev[p]
577 elif p in fullclnodes:
577 elif p in fullclnodes:
578 walk.extend([pp for pp in cl.parentrevs(p)
578 walk.extend([pp for pp in cl.parentrevs(p)
579 if pp != nullrev])
579 if pp != nullrev])
580 elif p in precomputedellipsis:
580 elif p in precomputedellipsis:
581 walk.extend([pp for pp in precomputedellipsis[p]
581 walk.extend([pp for pp in precomputedellipsis[p]
582 if pp != nullrev])
582 if pp != nullrev])
583 else:
583 else:
584 # In this case, we've got an ellipsis with parents
584 # In this case, we've got an ellipsis with parents
585 # outside the current bundle (likely an
585 # outside the current bundle (likely an
586 # incremental pull). We "know" that we can use the
586 # incremental pull). We "know" that we can use the
587 # value of this same revlog at whatever revision
587 # value of this same revlog at whatever revision
588 # is pointed to by linknode. "Know" is in scare
588 # is pointed to by linknode. "Know" is in scare
589 # quotes because I haven't done enough examination
589 # quotes because I haven't done enough examination
590 # of edge cases to convince myself this is really
590 # of edge cases to convince myself this is really
591 # a fact - it works for all the (admittedly
591 # a fact - it works for all the (admittedly
592 # thorough) cases in our testsuite, but I would be
592 # thorough) cases in our testsuite, but I would be
593 # somewhat unsurprised to find a case in the wild
593 # somewhat unsurprised to find a case in the wild
594 # where this breaks down a bit. That said, I don't
594 # where this breaks down a bit. That said, I don't
595 # know if it would hurt anything.
595 # know if it would hurt anything.
596 for i in pycompat.xrange(rev, 0, -1):
596 for i in pycompat.xrange(rev, 0, -1):
597 if store.linkrev(i) == clrev:
597 if store.linkrev(i) == clrev:
598 return i
598 return i
599 # We failed to resolve a parent for this node, so
599 # We failed to resolve a parent for this node, so
600 # we crash the changegroup construction.
600 # we crash the changegroup construction.
601 raise error.Abort(
601 raise error.Abort(
602 'unable to resolve parent while packing %r %r'
602 'unable to resolve parent while packing %r %r'
603 ' for changeset %r' % (store.indexfile, rev, clrev))
603 ' for changeset %r' % (store.indexfile, rev, clrev))
604
604
605 return nullrev
605 return nullrev
606
606
607 if not linkparents or (
607 if not linkparents or (
608 store.parentrevs(rev) == (nullrev, nullrev)):
608 store.parentrevs(rev) == (nullrev, nullrev)):
609 p1, p2 = nullrev, nullrev
609 p1, p2 = nullrev, nullrev
610 elif len(linkparents) == 1:
610 elif len(linkparents) == 1:
611 p1, = sorted(local(p) for p in linkparents)
611 p1, = sorted(local(p) for p in linkparents)
612 p2 = nullrev
612 p2 = nullrev
613 else:
613 else:
614 p1, p2 = sorted(local(p) for p in linkparents)
614 p1, p2 = sorted(local(p) for p in linkparents)
615
615
616 p1node, p2node = store.node(p1), store.node(p2)
616 p1node, p2node = store.node(p1), store.node(p2)
617
617
618 return p1node, p2node, linknode
618 return p1node, p2node, linknode
619
619
620 def deltagroup(repo, store, nodes, ischangelog, lookup, forcedeltaparentprev,
620 def deltagroup(repo, store, nodes, ischangelog, lookup, forcedeltaparentprev,
621 topic=None,
621 topic=None,
622 ellipses=False, clrevtolocalrev=None, fullclnodes=None,
622 ellipses=False, clrevtolocalrev=None, fullclnodes=None,
623 precomputedellipsis=None):
623 precomputedellipsis=None):
624 """Calculate deltas for a set of revisions.
624 """Calculate deltas for a set of revisions.
625
625
626 Is a generator of ``revisiondelta`` instances.
626 Is a generator of ``revisiondelta`` instances.
627
627
628 If topic is not None, progress detail will be generated using this
628 If topic is not None, progress detail will be generated using this
629 topic name (e.g. changesets, manifests, etc).
629 topic name (e.g. changesets, manifests, etc).
630 """
630 """
631 if not nodes:
631 if not nodes:
632 return
632 return
633
633
634 cl = repo.changelog
634 cl = repo.changelog
635
635
636 if ischangelog:
636 if ischangelog:
637 # `hg log` shows changesets in storage order. To preserve order
637 # `hg log` shows changesets in storage order. To preserve order
638 # across clones, send out changesets in storage order.
638 # across clones, send out changesets in storage order.
639 nodesorder = 'storage'
639 nodesorder = 'storage'
640 elif ellipses:
640 elif ellipses:
641 nodes = _sortnodesellipsis(store, nodes, cl, lookup)
641 nodes = _sortnodesellipsis(store, nodes, cl, lookup)
642 nodesorder = 'nodes'
642 nodesorder = 'nodes'
643 else:
643 else:
644 nodesorder = None
644 nodesorder = None
645
645
646 # Perform ellipses filtering and revision massaging. We do this before
646 # Perform ellipses filtering and revision massaging. We do this before
647 # emitrevisions() because a) filtering out revisions creates less work
647 # emitrevisions() because a) filtering out revisions creates less work
648 # for emitrevisions() b) dropping revisions would break emitrevisions()'s
648 # for emitrevisions() b) dropping revisions would break emitrevisions()'s
649 # assumptions about delta choices and we would possibly send a delta
649 # assumptions about delta choices and we would possibly send a delta
650 # referencing a missing base revision.
650 # referencing a missing base revision.
651 #
651 #
652 # Also, calling lookup() has side-effects with regards to populating
652 # Also, calling lookup() has side-effects with regards to populating
653 # data structures. If we don't call lookup() for each node or if we call
653 # data structures. If we don't call lookup() for each node or if we call
654 # lookup() after the first pass through each node, things can break -
654 # lookup() after the first pass through each node, things can break -
655 # possibly intermittently depending on the python hash seed! For that
655 # possibly intermittently depending on the python hash seed! For that
656 # reason, we store a mapping of all linknodes during the initial node
656 # reason, we store a mapping of all linknodes during the initial node
657 # pass rather than use lookup() on the output side.
657 # pass rather than use lookup() on the output side.
658 if ellipses:
658 if ellipses:
659 filtered = []
659 filtered = []
660 adjustedparents = {}
660 adjustedparents = {}
661 linknodes = {}
661 linknodes = {}
662
662
663 for node in nodes:
663 for node in nodes:
664 rev = store.rev(node)
664 rev = store.rev(node)
665 linknode = lookup(node)
665 linknode = lookup(node)
666 linkrev = cl.rev(linknode)
666 linkrev = cl.rev(linknode)
667 clrevtolocalrev[linkrev] = rev
667 clrevtolocalrev[linkrev] = rev
668
668
669 # If linknode is in fullclnodes, it means the corresponding
669 # If linknode is in fullclnodes, it means the corresponding
670 # changeset was a full changeset and is being sent unaltered.
670 # changeset was a full changeset and is being sent unaltered.
671 if linknode in fullclnodes:
671 if linknode in fullclnodes:
672 linknodes[node] = linknode
672 linknodes[node] = linknode
673
673
674 # If the corresponding changeset wasn't in the set computed
674 # If the corresponding changeset wasn't in the set computed
675 # as relevant to us, it should be dropped outright.
675 # as relevant to us, it should be dropped outright.
676 elif linkrev not in precomputedellipsis:
676 elif linkrev not in precomputedellipsis:
677 continue
677 continue
678
678
679 else:
679 else:
680 # We could probably do this later and avoid the dict
680 # We could probably do this later and avoid the dict
681 # holding state. But it likely doesn't matter.
681 # holding state. But it likely doesn't matter.
682 p1node, p2node, linknode = _resolvenarrowrevisioninfo(
682 p1node, p2node, linknode = _resolvenarrowrevisioninfo(
683 cl, store, ischangelog, rev, linkrev, linknode,
683 cl, store, ischangelog, rev, linkrev, linknode,
684 clrevtolocalrev, fullclnodes, precomputedellipsis)
684 clrevtolocalrev, fullclnodes, precomputedellipsis)
685
685
686 adjustedparents[node] = (p1node, p2node)
686 adjustedparents[node] = (p1node, p2node)
687 linknodes[node] = linknode
687 linknodes[node] = linknode
688
688
689 filtered.append(node)
689 filtered.append(node)
690
690
691 nodes = filtered
691 nodes = filtered
692
692
693 # We expect the first pass to be fast, so we only engage the progress
693 # We expect the first pass to be fast, so we only engage the progress
694 # meter for constructing the revision deltas.
694 # meter for constructing the revision deltas.
695 progress = None
695 progress = None
696 if topic is not None:
696 if topic is not None:
697 progress = repo.ui.makeprogress(topic, unit=_('chunks'),
697 progress = repo.ui.makeprogress(topic, unit=_('chunks'),
698 total=len(nodes))
698 total=len(nodes))
699
699
700 configtarget = repo.ui.config('devel', 'bundle.delta')
700 configtarget = repo.ui.config('devel', 'bundle.delta')
701 if configtarget not in ('', 'p1'):
701 if configtarget not in ('', 'p1', 'full'):
702 msg = _("""config "devel.bundle.delta" as unknown value: %s""")
702 msg = _("""config "devel.bundle.delta" as unknown value: %s""")
703 repo.ui.warn(msg % configtarget)
703 repo.ui.warn(msg % configtarget)
704
704
705 deltamode = repository.CG_DELTAMODE_STD
705 deltamode = repository.CG_DELTAMODE_STD
706 if forcedeltaparentprev:
706 if forcedeltaparentprev:
707 deltamode = repository.CG_DELTAMODE_PREV
707 deltamode = repository.CG_DELTAMODE_PREV
708 elif configtarget == 'p1':
708 elif configtarget == 'p1':
709 deltamode = repository.CG_DELTAMODE_P1
709 deltamode = repository.CG_DELTAMODE_P1
710 elif configtarget == 'full':
711 deltamode = repository.CG_DELTAMODE_FULL
710
712
711 revisions = store.emitrevisions(
713 revisions = store.emitrevisions(
712 nodes,
714 nodes,
713 nodesorder=nodesorder,
715 nodesorder=nodesorder,
714 revisiondata=True,
716 revisiondata=True,
715 assumehaveparentrevisions=not ellipses,
717 assumehaveparentrevisions=not ellipses,
716 deltamode=deltamode)
718 deltamode=deltamode)
717
719
718 for i, revision in enumerate(revisions):
720 for i, revision in enumerate(revisions):
719 if progress:
721 if progress:
720 progress.update(i + 1)
722 progress.update(i + 1)
721
723
722 if ellipses:
724 if ellipses:
723 linknode = linknodes[revision.node]
725 linknode = linknodes[revision.node]
724
726
725 if revision.node in adjustedparents:
727 if revision.node in adjustedparents:
726 p1node, p2node = adjustedparents[revision.node]
728 p1node, p2node = adjustedparents[revision.node]
727 revision.p1node = p1node
729 revision.p1node = p1node
728 revision.p2node = p2node
730 revision.p2node = p2node
729 revision.flags |= repository.REVISION_FLAG_ELLIPSIS
731 revision.flags |= repository.REVISION_FLAG_ELLIPSIS
730
732
731 else:
733 else:
732 linknode = lookup(revision.node)
734 linknode = lookup(revision.node)
733
735
734 revision.linknode = linknode
736 revision.linknode = linknode
735 yield revision
737 yield revision
736
738
737 if progress:
739 if progress:
738 progress.complete()
740 progress.complete()
739
741
740 class cgpacker(object):
742 class cgpacker(object):
741 def __init__(self, repo, oldmatcher, matcher, version,
743 def __init__(self, repo, oldmatcher, matcher, version,
742 builddeltaheader, manifestsend,
744 builddeltaheader, manifestsend,
743 forcedeltaparentprev=False,
745 forcedeltaparentprev=False,
744 bundlecaps=None, ellipses=False,
746 bundlecaps=None, ellipses=False,
745 shallow=False, ellipsisroots=None, fullnodes=None):
747 shallow=False, ellipsisroots=None, fullnodes=None):
746 """Given a source repo, construct a bundler.
748 """Given a source repo, construct a bundler.
747
749
748 oldmatcher is a matcher that matches on files the client already has.
750 oldmatcher is a matcher that matches on files the client already has.
749 These will not be included in the changegroup.
751 These will not be included in the changegroup.
750
752
751 matcher is a matcher that matches on files to include in the
753 matcher is a matcher that matches on files to include in the
752 changegroup. Used to facilitate sparse changegroups.
754 changegroup. Used to facilitate sparse changegroups.
753
755
754 forcedeltaparentprev indicates whether delta parents must be against
756 forcedeltaparentprev indicates whether delta parents must be against
755 the previous revision in a delta group. This should only be used for
757 the previous revision in a delta group. This should only be used for
756 compatibility with changegroup version 1.
758 compatibility with changegroup version 1.
757
759
758 builddeltaheader is a callable that constructs the header for a group
760 builddeltaheader is a callable that constructs the header for a group
759 delta.
761 delta.
760
762
761 manifestsend is a chunk to send after manifests have been fully emitted.
763 manifestsend is a chunk to send after manifests have been fully emitted.
762
764
763 ellipses indicates whether ellipsis serving mode is enabled.
765 ellipses indicates whether ellipsis serving mode is enabled.
764
766
765 bundlecaps is optional and can be used to specify the set of
767 bundlecaps is optional and can be used to specify the set of
766 capabilities which can be used to build the bundle. While bundlecaps is
768 capabilities which can be used to build the bundle. While bundlecaps is
767 unused in core Mercurial, extensions rely on this feature to communicate
769 unused in core Mercurial, extensions rely on this feature to communicate
768 capabilities to customize the changegroup packer.
770 capabilities to customize the changegroup packer.
769
771
770 shallow indicates whether shallow data might be sent. The packer may
772 shallow indicates whether shallow data might be sent. The packer may
771 need to pack file contents not introduced by the changes being packed.
773 need to pack file contents not introduced by the changes being packed.
772
774
773 fullnodes is the set of changelog nodes which should not be ellipsis
775 fullnodes is the set of changelog nodes which should not be ellipsis
774 nodes. We store this rather than the set of nodes that should be
776 nodes. We store this rather than the set of nodes that should be
775 ellipsis because for very large histories we expect this to be
777 ellipsis because for very large histories we expect this to be
776 significantly smaller.
778 significantly smaller.
777 """
779 """
778 assert oldmatcher
780 assert oldmatcher
779 assert matcher
781 assert matcher
780 self._oldmatcher = oldmatcher
782 self._oldmatcher = oldmatcher
781 self._matcher = matcher
783 self._matcher = matcher
782
784
783 self.version = version
785 self.version = version
784 self._forcedeltaparentprev = forcedeltaparentprev
786 self._forcedeltaparentprev = forcedeltaparentprev
785 self._builddeltaheader = builddeltaheader
787 self._builddeltaheader = builddeltaheader
786 self._manifestsend = manifestsend
788 self._manifestsend = manifestsend
787 self._ellipses = ellipses
789 self._ellipses = ellipses
788
790
789 # Set of capabilities we can use to build the bundle.
791 # Set of capabilities we can use to build the bundle.
790 if bundlecaps is None:
792 if bundlecaps is None:
791 bundlecaps = set()
793 bundlecaps = set()
        self._bundlecaps = bundlecaps
        self._isshallow = shallow
        self._fullclnodes = fullnodes

        # Maps ellipsis revs to their roots at the changelog level.
        self._precomputedellipsis = ellipsisroots

        self._repo = repo

        if self._repo.ui.verbose and not self._repo.ui.debugflag:
            self._verbosenote = self._repo.ui.note
        else:
            self._verbosenote = lambda s: None

    def generate(self, commonrevs, clnodes, fastpathlinkrev, source,
                 changelog=True):
        """Yield a sequence of changegroup byte chunks.
        If changelog is False, changelog data won't be added to changegroup
        """

        repo = self._repo
        cl = repo.changelog

        self._verbosenote(_('uncompressed size of bundle content:\n'))
        size = 0

        clstate, deltas = self._generatechangelog(cl, clnodes)
        for delta in deltas:
            if changelog:
                for chunk in _revisiondeltatochunks(delta,
                                                    self._builddeltaheader):
                    size += len(chunk)
                    yield chunk

        close = closechunk()
        size += len(close)
        yield closechunk()

        self._verbosenote(_('%8.i (changelog)\n') % size)

        clrevorder = clstate['clrevorder']
        manifests = clstate['manifests']
        changedfiles = clstate['changedfiles']

        # We need to make sure that the linkrev in the changegroup refers to
        # the first changeset that introduced the manifest or file revision.
        # The fastpath is usually safer than the slowpath, because the filelogs
        # are walked in revlog order.
        #
        # When taking the slowpath when the manifest revlog uses generaldelta,
        # the manifest may be walked in the "wrong" order. Without 'clrevorder',
        # we would get an incorrect linkrev (see fix in cc0ff93d0c0c).
        #
        # When taking the fastpath, we are only vulnerable to reordering
        # of the changelog itself. The changelog never uses generaldelta and is
        # never reordered. To handle this case, we simply take the slowpath,
        # which already has the 'clrevorder' logic. This was also fixed in
        # cc0ff93d0c0c.

        # Treemanifests don't work correctly with fastpathlinkrev
        # either, because we don't discover which directory nodes to
        # send along with files. This could probably be fixed.
        fastpathlinkrev = fastpathlinkrev and (
            'treemanifest' not in repo.requirements)

        fnodes = {}  # needed file nodes

        size = 0
        it = self.generatemanifests(
            commonrevs, clrevorder, fastpathlinkrev, manifests, fnodes, source,
            clstate['clrevtomanifestrev'])

        for tree, deltas in it:
            if tree:
                assert self.version == b'03'
                chunk = _fileheader(tree)
                size += len(chunk)
                yield chunk

            for delta in deltas:
                chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
                for chunk in chunks:
                    size += len(chunk)
                    yield chunk

            close = closechunk()
            size += len(close)
            yield close

        self._verbosenote(_('%8.i (manifests)\n') % size)
        yield self._manifestsend

        mfdicts = None
        if self._ellipses and self._isshallow:
            mfdicts = [(self._repo.manifestlog[n].read(), lr)
                       for (n, lr) in manifests.iteritems()]

        manifests.clear()
        clrevs = set(cl.rev(x) for x in clnodes)

        it = self.generatefiles(changedfiles, commonrevs,
                                source, mfdicts, fastpathlinkrev,
                                fnodes, clrevs)

        for path, deltas in it:
            h = _fileheader(path)
            size = len(h)
            yield h

            for delta in deltas:
                chunks = _revisiondeltatochunks(delta, self._builddeltaheader)
                for chunk in chunks:
                    size += len(chunk)
                    yield chunk

            close = closechunk()
            size += len(close)
            yield close

            self._verbosenote(_('%8.i %s\n') % (size, path))

        yield closechunk()

        if clnodes:
            repo.hook('outgoing', node=hex(clnodes[0]), source=source)

    def _generatechangelog(self, cl, nodes):
        """Generate data for changelog chunks.

        Returns a 2-tuple of a dict containing state and an iterable of
        byte chunks. The state will not be fully populated until the
        chunk stream has been fully consumed.
        """
        clrevorder = {}
        manifests = {}
        mfl = self._repo.manifestlog
        changedfiles = set()
        clrevtomanifestrev = {}

        # Callback for the changelog, used to collect changed files and
        # manifest nodes.
        # Returns the linkrev node (identity in the changelog case).
        def lookupcl(x):
            c = cl.changelogrevision(x)
            clrevorder[x] = len(clrevorder)

            if self._ellipses:
                # Only update manifests if x is going to be sent. Otherwise we
                # end up with bogus linkrevs specified for manifests and
                # we skip some manifest nodes that we should otherwise
                # have sent.
                if (x in self._fullclnodes
                    or cl.rev(x) in self._precomputedellipsis):

                    manifestnode = c.manifest
                    # Record the first changeset introducing this manifest
                    # version.
                    manifests.setdefault(manifestnode, x)
                    # Set this narrow-specific dict so we have the lowest
                    # manifest revnum to look up for this cl revnum. (Part of
                    # mapping changelog ellipsis parents to manifest ellipsis
                    # parents)
                    clrevtomanifestrev.setdefault(
                        cl.rev(x), mfl.rev(manifestnode))
                # We can't trust the changed files list in the changeset if the
                # client requested a shallow clone.
                if self._isshallow:
                    changedfiles.update(mfl[c.manifest].read().keys())
                else:
                    changedfiles.update(c.files)
            else:
                # record the first changeset introducing this manifest version
                manifests.setdefault(c.manifest, x)
                # Record a complete list of potentially-changed files in
                # this manifest.
                changedfiles.update(c.files)

            return x

        state = {
            'clrevorder': clrevorder,
            'manifests': manifests,
            'changedfiles': changedfiles,
            'clrevtomanifestrev': clrevtomanifestrev,
        }

        gen = deltagroup(
            self._repo, cl, nodes, True, lookupcl,
            self._forcedeltaparentprev,
            ellipses=self._ellipses,
            topic=_('changesets'),
            clrevtolocalrev={},
            fullclnodes=self._fullclnodes,
            precomputedellipsis=self._precomputedellipsis)

        return state, gen

    def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev,
                          manifests, fnodes, source, clrevtolocalrev):
        """Returns an iterator of changegroup chunks containing manifests.

        `source` is unused here, but is used by extensions like remotefilelog
        to change what is sent based on pulls vs pushes, etc.
        """
        repo = self._repo
        mfl = repo.manifestlog
        tmfnodes = {'': manifests}

        # Callback for the manifest, used to collect linkrevs for filelog
        # revisions.
        # Returns the linkrev node (collected in lookupcl).
        def makelookupmflinknode(tree, nodes):
            if fastpathlinkrev:
                assert not tree
                return manifests.__getitem__

            def lookupmflinknode(x):
                """Callback for looking up the linknode for manifests.

                Returns the linkrev node for the specified manifest.

                SIDE EFFECT:

                1) fclnodes gets populated with the list of relevant
                   file nodes if we're not using fastpathlinkrev
                2) When treemanifests are in use, collects treemanifest nodes
                   to send

                Note that this means manifests must be completely sent to
                the client before you can trust the list of files and
                treemanifests to send.
                """
                clnode = nodes[x]
                mdata = mfl.get(tree, x).readfast(shallow=True)
                for p, n, fl in mdata.iterentries():
                    if fl == 't':  # subdirectory manifest
                        subtree = tree + p + '/'
                        tmfclnodes = tmfnodes.setdefault(subtree, {})
                        tmfclnode = tmfclnodes.setdefault(n, clnode)
                        if clrevorder[clnode] < clrevorder[tmfclnode]:
                            tmfclnodes[n] = clnode
                    else:
                        f = tree + p
                        fclnodes = fnodes.setdefault(f, {})
                        fclnode = fclnodes.setdefault(n, clnode)
                        if clrevorder[clnode] < clrevorder[fclnode]:
                            fclnodes[n] = clnode
                return clnode
            return lookupmflinknode

        while tmfnodes:
            tree, nodes = tmfnodes.popitem()
            store = mfl.getstorage(tree)

            if not self._matcher.visitdir(store.tree[:-1] or '.'):
                # No nodes to send because this directory is out of
                # the client's view of the repository (probably
                # because of narrow clones).
                prunednodes = []
            else:
                # Avoid sending any manifest nodes we can prove the
                # client already has by checking linkrevs. See the
                # related comment in generatefiles().
                prunednodes = self._prunemanifests(store, nodes, commonrevs)
            if tree and not prunednodes:
                continue

            lookupfn = makelookupmflinknode(tree, nodes)

            deltas = deltagroup(
                self._repo, store, prunednodes, False, lookupfn,
                self._forcedeltaparentprev,
                ellipses=self._ellipses,
                topic=_('manifests'),
                clrevtolocalrev=clrevtolocalrev,
                fullclnodes=self._fullclnodes,
                precomputedellipsis=self._precomputedellipsis)

            if not self._oldmatcher.visitdir(store.tree[:-1] or '.'):
                yield tree, deltas
            else:
                # 'deltas' is a generator and we need to consume it even if
                # we are not going to send it because a side-effect is that
                # it updates tmfnodes (via lookupfn)
                for d in deltas:
                    pass
            if not tree:
                yield tree, []

    def _prunemanifests(self, store, nodes, commonrevs):
        # This is split out as a separate method to allow filtering
        # commonrevs in extension code.
        #
        # TODO(augie): this shouldn't be required, instead we should
        # make filtering of revisions to send delegated to the store
        # layer.
        frev, flr = store.rev, store.linkrev
        return [n for n in nodes if flr(frev(n)) not in commonrevs]

    # The 'source' parameter is useful for extensions
    def generatefiles(self, changedfiles, commonrevs, source,
                      mfdicts, fastpathlinkrev, fnodes, clrevs):
        changedfiles = [f for f in changedfiles
                        if self._matcher(f) and not self._oldmatcher(f)]

        if not fastpathlinkrev:
            def normallinknodes(unused, fname):
                return fnodes.get(fname, {})
        else:
            cln = self._repo.changelog.node

            def normallinknodes(store, fname):
                flinkrev = store.linkrev
                fnode = store.node
                revs = ((r, flinkrev(r)) for r in store)
                return dict((fnode(r), cln(lr))
                            for r, lr in revs if lr in clrevs)

        clrevtolocalrev = {}

        if self._isshallow:
            # In a shallow clone, the linknodes callback needs to also include
            # those file nodes that are in the manifests we sent but weren't
            # introduced by those manifests.
            commonctxs = [self._repo[c] for c in commonrevs]
            clrev = self._repo.changelog.rev

            def linknodes(flog, fname):
                for c in commonctxs:
                    try:
                        fnode = c.filenode(fname)
                        clrevtolocalrev[c.rev()] = flog.rev(fnode)
                    except error.ManifestLookupError:
                        pass
                links = normallinknodes(flog, fname)
                if len(links) != len(mfdicts):
                    for mf, lr in mfdicts:
                        fnode = mf.get(fname, None)
                        if fnode in links:
                            links[fnode] = min(links[fnode], lr, key=clrev)
                        elif fnode:
                            links[fnode] = lr
                return links
        else:
            linknodes = normallinknodes

        repo = self._repo
        progress = repo.ui.makeprogress(_('files'), unit=_('files'),
                                        total=len(changedfiles))
        for i, fname in enumerate(sorted(changedfiles)):
            filerevlog = repo.file(fname)
            if not filerevlog:
                raise error.Abort(_("empty or missing file data for %s") %
                                  fname)

            clrevtolocalrev.clear()

            linkrevnodes = linknodes(filerevlog, fname)
            # Lookup for filenodes, we collected the linkrev nodes above in the
            # fastpath case and with lookupmf in the slowpath case.
            def lookupfilelog(x):
                return linkrevnodes[x]

            frev, flr = filerevlog.rev, filerevlog.linkrev
            # Skip sending any filenode we know the client already
            # has. This avoids over-sending files relatively
            # inexpensively, so it's not a problem if we under-filter
            # here.
            filenodes = [n for n in linkrevnodes
                         if flr(frev(n)) not in commonrevs]

            if not filenodes:
                continue

            progress.update(i + 1, item=fname)

            deltas = deltagroup(
                self._repo, filerevlog, filenodes, False, lookupfilelog,
                self._forcedeltaparentprev,
                ellipses=self._ellipses,
                clrevtolocalrev=clrevtolocalrev,
                fullclnodes=self._fullclnodes,
                precomputedellipsis=self._precomputedellipsis)

            yield fname, deltas

        progress.complete()

def _makecg1packer(repo, oldmatcher, matcher, bundlecaps,
                   ellipses=False, shallow=False, ellipsisroots=None,
                   fullnodes=None):
    builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack(
        d.node, d.p1node, d.p2node, d.linknode)

    return cgpacker(repo, oldmatcher, matcher, b'01',
                    builddeltaheader=builddeltaheader,
                    manifestsend=b'',
                    forcedeltaparentprev=True,
                    bundlecaps=bundlecaps,
                    ellipses=ellipses,
                    shallow=shallow,
                    ellipsisroots=ellipsisroots,
                    fullnodes=fullnodes)

def _makecg2packer(repo, oldmatcher, matcher, bundlecaps,
                   ellipses=False, shallow=False, ellipsisroots=None,
                   fullnodes=None):
    builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack(
        d.node, d.p1node, d.p2node, d.basenode, d.linknode)

    return cgpacker(repo, oldmatcher, matcher, b'02',
                    builddeltaheader=builddeltaheader,
                    manifestsend=b'',
                    bundlecaps=bundlecaps,
                    ellipses=ellipses,
                    shallow=shallow,
                    ellipsisroots=ellipsisroots,
                    fullnodes=fullnodes)

def _makecg3packer(repo, oldmatcher, matcher, bundlecaps,
                   ellipses=False, shallow=False, ellipsisroots=None,
                   fullnodes=None):
    builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
        d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags)

    return cgpacker(repo, oldmatcher, matcher, b'03',
                    builddeltaheader=builddeltaheader,
                    manifestsend=closechunk(),
                    bundlecaps=bundlecaps,
                    ellipses=ellipses,
                    shallow=shallow,
                    ellipsisroots=ellipsisroots,
                    fullnodes=fullnodes)

_packermap = {'01': (_makecg1packer, cg1unpacker),
              # cg2 adds support for exchanging generaldelta
              '02': (_makecg2packer, cg2unpacker),
              # cg3 adds support for exchanging revlog flags and treemanifests
              '03': (_makecg3packer, cg3unpacker),
}

def allsupportedversions(repo):
    versions = set(_packermap.keys())
    if not (repo.ui.configbool('experimental', 'changegroup3') or
            repo.ui.configbool('experimental', 'treemanifest') or
            'treemanifest' in repo.requirements):
        versions.discard('03')
    return versions

# Changegroup versions that can be applied to the repo
def supportedincomingversions(repo):
    return allsupportedversions(repo)

# Changegroup versions that can be created from the repo
def supportedoutgoingversions(repo):
    versions = allsupportedversions(repo)
    if 'treemanifest' in repo.requirements:
        # Versions 01 and 02 support only flat manifests and it's just too
        # expensive to convert between the flat manifest and tree manifest on
        # the fly. Since tree manifests are hashed differently, all of history
        # would have to be converted. Instead, we simply don't even pretend to
        # support versions 01 and 02.
        versions.discard('01')
        versions.discard('02')
    if repository.NARROW_REQUIREMENT in repo.requirements:
        # Versions 01 and 02 don't support revlog flags, and we need to
        # support that for stripping and unbundling to work.
        versions.discard('01')
        versions.discard('02')
    if LFS_REQUIREMENT in repo.requirements:
        # Versions 01 and 02 don't support revlog flags, and we need to
        # mark LFS entries with REVIDX_EXTSTORED.
        versions.discard('01')
        versions.discard('02')

    return versions

def localversion(repo):
    # Finds the best version to use for bundles that are meant to be used
    # locally, such as those from strip and shelve, and temporary bundles.
    return max(supportedoutgoingversions(repo))

def safeversion(repo):
    # Finds the smallest version that it's safe to assume clients of the repo
    # will support. For example, all hg versions that support generaldelta also
    # support changegroup 02.
    versions = supportedoutgoingversions(repo)
    if 'generaldelta' in repo.requirements:
        versions.discard('01')
    assert versions
    return min(versions)

def getbundler(version, repo, bundlecaps=None, oldmatcher=None,
               matcher=None, ellipses=False, shallow=False,
               ellipsisroots=None, fullnodes=None):
    assert version in supportedoutgoingversions(repo)

    if matcher is None:
        matcher = matchmod.alwaysmatcher(repo.root, '')
    if oldmatcher is None:
        oldmatcher = matchmod.nevermatcher(repo.root, '')

    if version == '01' and not matcher.always():
        raise error.ProgrammingError('version 01 changegroups do not support '
                                     'sparse file matchers')

    if ellipses and version in (b'01', b'02'):
        raise error.Abort(
            _('ellipsis nodes require at least cg3 on client and server, '
              'but negotiated version %s') % version)

    # Requested files could include files not in the local store. So
    # filter those out.
    matcher = repo.narrowmatch(matcher)

    fn = _packermap[version][0]
    return fn(repo, oldmatcher, matcher, bundlecaps, ellipses=ellipses,
              shallow=shallow, ellipsisroots=ellipsisroots,
              fullnodes=fullnodes)

def getunbundler(version, fh, alg, extras=None):
    return _packermap[version][1](fh, alg, extras=extras)

def _changegroupinfo(repo, nodes, source):
    if repo.ui.verbose or source == 'bundle':
        repo.ui.status(_("%d changesets found\n") % len(nodes))
        if repo.ui.debugflag:
            repo.ui.debug("list of changesets:\n")
            for node in nodes:
                repo.ui.debug("%s\n" % hex(node))

def makechangegroup(repo, outgoing, version, source, fastpath=False,
                    bundlecaps=None):
    cgstream = makestream(repo, outgoing, version, source,
                          fastpath=fastpath, bundlecaps=bundlecaps)
    return getunbundler(version, util.chunkbuffer(cgstream), None,
                        {'clcount': len(outgoing.missing) })

def makestream(repo, outgoing, version, source, fastpath=False,
               bundlecaps=None, matcher=None):
    bundler = getbundler(version, repo, bundlecaps=bundlecaps,
                         matcher=matcher)

    repo = repo.unfiltered()
    commonrevs = outgoing.common
    csets = outgoing.missing
    heads = outgoing.missingheads
    # We go through the fast path if we get told to, or if all (unfiltered)
    # heads have been requested (since we then know all linkrevs will
    # be pulled by the client).
    heads.sort()
    fastpathlinkrev = fastpath or (
        repo.filtername is None and heads == sorted(repo.heads()))

    repo.hook('preoutgoing', throw=True, source=source)
    _changegroupinfo(repo, csets, source)
    return bundler.generate(commonrevs, csets, fastpathlinkrev, source)

def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
    revisions = 0
    files = 0
    progress = repo.ui.makeprogress(_('files'), unit=_('files'),
                                    total=expectedfiles)
    for chunkdata in iter(source.filelogheader, {}):
        files += 1
        f = chunkdata["filename"]
        repo.ui.debug("adding %s revisions\n" % f)
        progress.increment()
        fl = repo.file(f)
        o = len(fl)
        try:
            deltas = source.deltaiter()
            if not fl.addgroup(deltas, revmap, trp):
                raise error.Abort(_("received file revlog group is empty"))
        except error.CensoredBaseError as e:
            raise error.Abort(_("received delta base is censored: %s") % e)
        revisions += len(fl) - o
        if f in needfiles:
            needs = needfiles[f]
            for new in pycompat.xrange(o, len(fl)):
                n = fl.node(new)
                if n in needs:
                    needs.remove(n)
                else:
                    raise error.Abort(
                        _("received spurious file revlog entry"))
            if not needs:
                del needfiles[f]
    progress.complete()

    for f, needs in needfiles.iteritems():
        fl = repo.file(f)
        for n in needs:
            try:
                fl.rev(n)
            except error.LookupError:
                raise error.Abort(
                    _('missing file data for %s:%s - run hg verify') %
                    (f, hex(n)))

    return revisions, files
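
The functions above are normally driven through makechangegroup()/makestream(). Purely as orientation, and not part of this changeset, the following minimal sketch shows one way a caller might bundle every changeset of a repository; the discovery.outgoing construction and the 'bundle' source string are assumptions about a typical caller, not something taken from this file.

# Hypothetical usage sketch of the API defined in this file. Assumes an
# in-process `repo` object and describes "nothing is common, everything is
# missing" via discovery.outgoing.
from mercurial import changegroup, discovery

def sketch_bundle_all(repo):
    outgoing = discovery.outgoing(repo, commonheads=[],
                                  missingheads=repo.heads())
    version = changegroup.safeversion(repo)
    # makechangegroup wraps the byte stream produced by makestream in an
    # unbundler, so the result can be applied or written out with writechunks.
    return changegroup.makechangegroup(repo, outgoing, version, 'bundle')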
@@ -1,907 +1,911 b''
Setting up test

$ hg init test
$ cd test
$ echo 0 > afile
$ hg add afile
$ hg commit -m "0.0"
$ echo 1 >> afile
$ hg commit -m "0.1"
$ echo 2 >> afile
$ hg commit -m "0.2"
$ echo 3 >> afile
$ hg commit -m "0.3"
$ hg update -C 0
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo 1 >> afile
$ hg commit -m "1.1"
created new head
$ echo 2 >> afile
$ hg commit -m "1.2"
$ echo "a line" > fred
$ echo 3 >> afile
$ hg add fred
$ hg commit -m "1.3"
$ hg mv afile adifferentfile
$ hg commit -m "1.3m"
$ hg update -C 3
1 files updated, 0 files merged, 2 files removed, 0 files unresolved
$ hg mv afile anotherfile
$ hg commit -m "0.3m"
$ hg verify
checking changesets
checking manifests
crosschecking files in changesets and manifests
checking files
checked 9 changesets with 7 changes to 4 files
$ cd ..
$ hg init empty

Bundle and phase

$ hg -R test phase --force --secret 0
$ hg -R test bundle phase.hg empty
searching for changes
no changes found (ignored 9 secret changesets)
[1]
$ hg -R test phase --draft -r 'head()'

Bundle --all

$ hg -R test bundle --all all.hg
9 changesets found

Bundle test to full.hg

$ hg -R test bundle full.hg empty
searching for changes
9 changesets found

Unbundle full.hg in test

$ hg -R test unbundle full.hg
adding changesets
adding manifests
adding file changes
added 0 changesets with 0 changes to 4 files
(run 'hg update' to get a working copy)

Verify empty

$ hg -R empty heads
[1]
$ hg -R empty verify
checking changesets
checking manifests
crosschecking files in changesets and manifests
checking files
checked 0 changesets with 0 changes to 0 files

#if repobundlerepo

Pull full.hg into test (using --cwd)

$ hg --cwd test pull ../full.hg
pulling from ../full.hg
searching for changes
no changes found

Verify that there are no leaked temporary files after pull (issue2797)

$ ls test/.hg | grep .hg10un
[1]

Pull full.hg into empty (using --cwd)

$ hg --cwd empty pull ../full.hg
pulling from ../full.hg
requesting all changes
adding changesets
adding manifests
adding file changes
added 9 changesets with 7 changes to 4 files (+1 heads)
new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
(run 'hg heads' to see heads, 'hg merge' to merge)

Rollback empty

$ hg -R empty rollback
repository tip rolled back to revision -1 (undo pull)

Pull full.hg into empty again (using --cwd)

$ hg --cwd empty pull ../full.hg
pulling from ../full.hg
requesting all changes
adding changesets
adding manifests
adding file changes
added 9 changesets with 7 changes to 4 files (+1 heads)
new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
(run 'hg heads' to see heads, 'hg merge' to merge)

Pull full.hg into test (using -R)

$ hg -R test pull full.hg
pulling from full.hg
searching for changes
no changes found

Pull full.hg into empty (using -R)

$ hg -R empty pull full.hg
pulling from full.hg
searching for changes
no changes found

Rollback empty

$ hg -R empty rollback
repository tip rolled back to revision -1 (undo pull)

Pull full.hg into empty again (using -R)

$ hg -R empty pull full.hg
pulling from full.hg
requesting all changes
adding changesets
adding manifests
adding file changes
added 9 changesets with 7 changes to 4 files (+1 heads)
new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
(run 'hg heads' to see heads, 'hg merge' to merge)

Log -R full.hg in fresh empty

$ rm -r empty
$ hg init empty
$ cd empty
$ hg -R bundle://../full.hg log
changeset: 8:aa35859c02ea
tag: tip
parent: 3:eebf5a27f8ca
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 0.3m

changeset: 7:a6a34bfa0076
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 1.3m

changeset: 6:7373c1169842
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 1.3

changeset: 5:1bb50a9436a7
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 1.2

changeset: 4:095197eb4973
parent: 0:f9ee2f85a263
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 1.1

changeset: 3:eebf5a27f8ca
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 0.3

changeset: 2:e38ba6f5b7e0
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 0.2

changeset: 1:34c2bf6b0626
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 0.1

changeset: 0:f9ee2f85a263
user: test
date: Thu Jan 01 00:00:00 1970 +0000
summary: 0.0

Make sure bundlerepo doesn't leak tempfiles (issue2491)

$ ls .hg
00changelog.i
cache
requires
store

Pull ../full.hg into empty (with hook)

$ cat >> .hg/hgrc <<EOF
> [hooks]
> changegroup = sh -c "printenv.py changegroup"
> EOF

doesn't work (yet ?)

hg -R bundle://../full.hg verify

$ hg pull bundle://../full.hg
pulling from bundle:../full.hg
requesting all changes
adding changesets
adding manifests
adding file changes
added 9 changesets with 7 changes to 4 files (+1 heads)
new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=bundle*../full.hg (glob)
(run 'hg heads' to see heads, 'hg merge' to merge)

Rollback empty

$ hg rollback
repository tip rolled back to revision -1 (undo pull)
$ cd ..

Log -R bundle:empty+full.hg

$ hg -R bundle:empty+full.hg log --template="{rev} "; echo ""
8 7 6 5 4 3 2 1 0

Pull full.hg into empty again (using -R; with hook)

$ hg -R empty pull full.hg
pulling from full.hg
requesting all changes
adding changesets
adding manifests
adding file changes
added 9 changesets with 7 changes to 4 files (+1 heads)
new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
changegroup hook: HG_HOOKNAME=changegroup HG_HOOKTYPE=changegroup HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf HG_SOURCE=pull HG_TXNID=TXN:$ID$ HG_URL=bundle:empty+full.hg
(run 'hg heads' to see heads, 'hg merge' to merge)

#endif

Cannot produce streaming clone bundles with "hg bundle"

$ hg -R test bundle -t packed1 packed.hg
abort: packed bundles cannot be produced by "hg bundle"
(use 'hg debugcreatestreamclonebundle')
[255]

packed1 is produced properly

#if reporevlogstore

$ hg -R test debugcreatestreamclonebundle packed.hg
writing 2664 bytes for 6 files
bundle requirements: generaldelta, revlogv1

$ f -B 64 --size --sha1 --hexdump packed.hg
packed.hg: size=2827, sha1=9d14cb90c66a21462d915ab33656f38b9deed686
0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........|
0010: 00 00 00 00 0a 68 00 16 67 65 6e 65 72 61 6c 64 |.....h..generald|
0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 00 64 61 |elta,revlogv1.da|
0030: 74 61 2f 61 64 69 66 66 65 72 65 6e 74 66 69 6c |ta/adifferentfil|

$ hg debugbundle --spec packed.hg
none-packed1;requirements%3Dgeneraldelta%2Crevlogv1

generaldelta requirement is not listed in stream clone bundles unless used

$ hg --config format.usegeneraldelta=false init testnongd
$ cd testnongd
$ touch foo
$ hg -q commit -A -m initial
$ cd ..
$ hg -R testnongd debugcreatestreamclonebundle packednongd.hg
writing 301 bytes for 3 files
bundle requirements: revlogv1

$ f -B 64 --size --sha1 --hexdump packednongd.hg
packednongd.hg: size=383, sha1=1d9c230238edd5d38907100b729ba72b1831fe6f
0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........|
0010: 00 00 00 00 01 2d 00 09 72 65 76 6c 6f 67 76 31 |.....-..revlogv1|
0020: 00 64 61 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 |.data/foo.i.64..|
0030: 01 00 01 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|

$ hg debugbundle --spec packednongd.hg
none-packed1;requirements%3Drevlogv1

Warning emitted when packed bundles contain secret changesets

$ hg init testsecret
$ cd testsecret
$ touch foo
$ hg -q commit -A -m initial
$ hg phase --force --secret -r .
$ cd ..

$ hg -R testsecret debugcreatestreamclonebundle packedsecret.hg
(warning: stream clone bundle will contain secret revisions)
writing 301 bytes for 3 files
bundle requirements: generaldelta, revlogv1

Unpacking packed1 bundles with "hg unbundle" isn't allowed

$ hg init packed
$ hg -R packed unbundle packed.hg
abort: packed bundles cannot be applied with "hg unbundle"
(use "hg debugapplystreamclonebundle")
[255]

packed1 can be consumed from debug command

(this also confirms that streamclone-ed changes are visible via
@filecache properties to in-process procedures before closing
transaction)

$ cat > $TESTTMP/showtip.py <<EOF
> from __future__ import absolute_import
>
> def showtip(ui, repo, hooktype, **kwargs):
>     ui.warn(b'%s: %s\n' % (hooktype, repo[b'tip'].hex()[:12]))
>
> def reposetup(ui, repo):
>     # this confirms (and ensures) that (empty) 00changelog.i
>     # before streamclone is already cached as repo.changelog
>     ui.setconfig(b'hooks', b'pretxnopen.showtip', showtip)
>
>     # this confirms that streamclone-ed changes are visible to
>     # in-process procedures before closing transaction
>     ui.setconfig(b'hooks', b'pretxnclose.showtip', showtip)
>
>     # this confirms that streamclone-ed changes are still visible
>     # after closing transaction
>     ui.setconfig(b'hooks', b'txnclose.showtip', showtip)
> EOF
$ cat >> $HGRCPATH <<EOF
> [extensions]
> showtip = $TESTTMP/showtip.py
> EOF

$ hg -R packed debugapplystreamclonebundle packed.hg
6 files to transfer, 2.60 KB of data
pretxnopen: 000000000000
pretxnclose: aa35859c02ea
transferred 2.60 KB in *.* seconds (* */sec) (glob)
txnclose: aa35859c02ea

(for safety, confirm visibility of streamclone-ed changes by another
process, too)

$ hg -R packed tip -T "{node|short}\n"
aa35859c02ea

$ cat >> $HGRCPATH <<EOF
> [extensions]
> showtip = !
> EOF

Does not work on non-empty repo

$ hg -R packed debugapplystreamclonebundle packed.hg
abort: cannot apply stream clone bundle on non-empty repo
[255]

#endif

Create partial clones

$ rm -r empty
$ hg init empty
$ hg clone -r 3 test partial
adding changesets
adding manifests
adding file changes
added 4 changesets with 4 changes to 1 files
new changesets f9ee2f85a263:eebf5a27f8ca
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg clone partial partial2
updating to branch default
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
402 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
403 $ cd partial
403 $ cd partial
404
404
405 #if repobundlerepo
405 #if repobundlerepo
406
406
407 Log -R full.hg in partial
407 Log -R full.hg in partial
408
408
409 $ hg -R bundle://../full.hg log -T phases
409 $ hg -R bundle://../full.hg log -T phases
410 changeset: 8:aa35859c02ea
410 changeset: 8:aa35859c02ea
411 tag: tip
411 tag: tip
412 phase: draft
412 phase: draft
413 parent: 3:eebf5a27f8ca
413 parent: 3:eebf5a27f8ca
414 user: test
414 user: test
415 date: Thu Jan 01 00:00:00 1970 +0000
415 date: Thu Jan 01 00:00:00 1970 +0000
416 summary: 0.3m
416 summary: 0.3m
417
417
418 changeset: 7:a6a34bfa0076
418 changeset: 7:a6a34bfa0076
419 phase: draft
419 phase: draft
420 user: test
420 user: test
421 date: Thu Jan 01 00:00:00 1970 +0000
421 date: Thu Jan 01 00:00:00 1970 +0000
422 summary: 1.3m
422 summary: 1.3m
423
423
424 changeset: 6:7373c1169842
424 changeset: 6:7373c1169842
425 phase: draft
425 phase: draft
426 user: test
426 user: test
427 date: Thu Jan 01 00:00:00 1970 +0000
427 date: Thu Jan 01 00:00:00 1970 +0000
428 summary: 1.3
428 summary: 1.3
429
429
430 changeset: 5:1bb50a9436a7
430 changeset: 5:1bb50a9436a7
431 phase: draft
431 phase: draft
432 user: test
432 user: test
433 date: Thu Jan 01 00:00:00 1970 +0000
433 date: Thu Jan 01 00:00:00 1970 +0000
434 summary: 1.2
434 summary: 1.2
435
435
436 changeset: 4:095197eb4973
436 changeset: 4:095197eb4973
437 phase: draft
437 phase: draft
438 parent: 0:f9ee2f85a263
438 parent: 0:f9ee2f85a263
439 user: test
439 user: test
440 date: Thu Jan 01 00:00:00 1970 +0000
440 date: Thu Jan 01 00:00:00 1970 +0000
441 summary: 1.1
441 summary: 1.1
442
442
443 changeset: 3:eebf5a27f8ca
443 changeset: 3:eebf5a27f8ca
444 phase: public
444 phase: public
445 user: test
445 user: test
446 date: Thu Jan 01 00:00:00 1970 +0000
446 date: Thu Jan 01 00:00:00 1970 +0000
447 summary: 0.3
447 summary: 0.3
448
448
449 changeset: 2:e38ba6f5b7e0
449 changeset: 2:e38ba6f5b7e0
450 phase: public
450 phase: public
451 user: test
451 user: test
452 date: Thu Jan 01 00:00:00 1970 +0000
452 date: Thu Jan 01 00:00:00 1970 +0000
453 summary: 0.2
453 summary: 0.2
454
454
455 changeset: 1:34c2bf6b0626
455 changeset: 1:34c2bf6b0626
456 phase: public
456 phase: public
457 user: test
457 user: test
458 date: Thu Jan 01 00:00:00 1970 +0000
458 date: Thu Jan 01 00:00:00 1970 +0000
459 summary: 0.1
459 summary: 0.1
460
460
461 changeset: 0:f9ee2f85a263
461 changeset: 0:f9ee2f85a263
462 phase: public
462 phase: public
463 user: test
463 user: test
464 date: Thu Jan 01 00:00:00 1970 +0000
464 date: Thu Jan 01 00:00:00 1970 +0000
465 summary: 0.0
465 summary: 0.0
466
466
467
467
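The log above is run against a bundle:// URL: Mercurial overlays full.hg on the repository in the current directory (a bundlerepo) without unpacking it. A rough sketch of doing the same from Python, assuming hg.repository() resolves bundle:// paths the same way -R does:

    from mercurial import hg, ui as uimod

    # Overlay ../full.hg on the repository in the current working directory.
    # Sketch only; assumes hg.repository() accepts bundle:// URLs like -R does.
    u = uimod.ui.load()
    repo = hg.repository(u, b'bundle://../full.hg')
    print(repo[b'tip'].hex()[:12])          # aa35859c02ea in the log above
    for rev in repo.revs(b'bundle()'):      # revisions contained in the bundle
        print(rev, repo[rev].description())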
468 Incoming full.hg in partial
468 Incoming full.hg in partial
469
469
470 $ hg incoming bundle://../full.hg
470 $ hg incoming bundle://../full.hg
471 comparing with bundle:../full.hg
471 comparing with bundle:../full.hg
472 searching for changes
472 searching for changes
473 changeset: 4:095197eb4973
473 changeset: 4:095197eb4973
474 parent: 0:f9ee2f85a263
474 parent: 0:f9ee2f85a263
475 user: test
475 user: test
476 date: Thu Jan 01 00:00:00 1970 +0000
476 date: Thu Jan 01 00:00:00 1970 +0000
477 summary: 1.1
477 summary: 1.1
478
478
479 changeset: 5:1bb50a9436a7
479 changeset: 5:1bb50a9436a7
480 user: test
480 user: test
481 date: Thu Jan 01 00:00:00 1970 +0000
481 date: Thu Jan 01 00:00:00 1970 +0000
482 summary: 1.2
482 summary: 1.2
483
483
484 changeset: 6:7373c1169842
484 changeset: 6:7373c1169842
485 user: test
485 user: test
486 date: Thu Jan 01 00:00:00 1970 +0000
486 date: Thu Jan 01 00:00:00 1970 +0000
487 summary: 1.3
487 summary: 1.3
488
488
489 changeset: 7:a6a34bfa0076
489 changeset: 7:a6a34bfa0076
490 user: test
490 user: test
491 date: Thu Jan 01 00:00:00 1970 +0000
491 date: Thu Jan 01 00:00:00 1970 +0000
492 summary: 1.3m
492 summary: 1.3m
493
493
494 changeset: 8:aa35859c02ea
494 changeset: 8:aa35859c02ea
495 tag: tip
495 tag: tip
496 parent: 3:eebf5a27f8ca
496 parent: 3:eebf5a27f8ca
497 user: test
497 user: test
498 date: Thu Jan 01 00:00:00 1970 +0000
498 date: Thu Jan 01 00:00:00 1970 +0000
499 summary: 0.3m
499 summary: 0.3m
500
500
501
501
502 Outgoing -R full.hg vs partial2 in partial
502 Outgoing -R full.hg vs partial2 in partial
503
503
504 $ hg -R bundle://../full.hg outgoing ../partial2
504 $ hg -R bundle://../full.hg outgoing ../partial2
505 comparing with ../partial2
505 comparing with ../partial2
506 searching for changes
506 searching for changes
507 changeset: 4:095197eb4973
507 changeset: 4:095197eb4973
508 parent: 0:f9ee2f85a263
508 parent: 0:f9ee2f85a263
509 user: test
509 user: test
510 date: Thu Jan 01 00:00:00 1970 +0000
510 date: Thu Jan 01 00:00:00 1970 +0000
511 summary: 1.1
511 summary: 1.1
512
512
513 changeset: 5:1bb50a9436a7
513 changeset: 5:1bb50a9436a7
514 user: test
514 user: test
515 date: Thu Jan 01 00:00:00 1970 +0000
515 date: Thu Jan 01 00:00:00 1970 +0000
516 summary: 1.2
516 summary: 1.2
517
517
518 changeset: 6:7373c1169842
518 changeset: 6:7373c1169842
519 user: test
519 user: test
520 date: Thu Jan 01 00:00:00 1970 +0000
520 date: Thu Jan 01 00:00:00 1970 +0000
521 summary: 1.3
521 summary: 1.3
522
522
523 changeset: 7:a6a34bfa0076
523 changeset: 7:a6a34bfa0076
524 user: test
524 user: test
525 date: Thu Jan 01 00:00:00 1970 +0000
525 date: Thu Jan 01 00:00:00 1970 +0000
526 summary: 1.3m
526 summary: 1.3m
527
527
528 changeset: 8:aa35859c02ea
528 changeset: 8:aa35859c02ea
529 tag: tip
529 tag: tip
530 parent: 3:eebf5a27f8ca
530 parent: 3:eebf5a27f8ca
531 user: test
531 user: test
532 date: Thu Jan 01 00:00:00 1970 +0000
532 date: Thu Jan 01 00:00:00 1970 +0000
533 summary: 0.3m
533 summary: 0.3m
534
534
535
535
536 Outgoing -R does-not-exist.hg vs partial2 in partial
536 Outgoing -R does-not-exist.hg vs partial2 in partial
537
537
538 $ hg -R bundle://../does-not-exist.hg outgoing ../partial2
538 $ hg -R bundle://../does-not-exist.hg outgoing ../partial2
539 abort: *../does-not-exist.hg* (glob)
539 abort: *../does-not-exist.hg* (glob)
540 [255]
540 [255]
541
541
542 #endif
542 #endif
543
543
544 $ cd ..
544 $ cd ..
545
545
546 hide outer repo
546 hide outer repo
547 $ hg init
547 $ hg init
548
548
549 Direct clone from bundle (all-history)
549 Direct clone from bundle (all-history)
550
550
551 #if repobundlerepo
551 #if repobundlerepo
552
552
553 $ hg clone full.hg full-clone
553 $ hg clone full.hg full-clone
554 requesting all changes
554 requesting all changes
555 adding changesets
555 adding changesets
556 adding manifests
556 adding manifests
557 adding file changes
557 adding file changes
558 added 9 changesets with 7 changes to 4 files (+1 heads)
558 added 9 changesets with 7 changes to 4 files (+1 heads)
559 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
559 new changesets f9ee2f85a263:aa35859c02ea (9 drafts)
560 updating to branch default
560 updating to branch default
561 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
561 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
562 $ hg -R full-clone heads
562 $ hg -R full-clone heads
563 changeset: 8:aa35859c02ea
563 changeset: 8:aa35859c02ea
564 tag: tip
564 tag: tip
565 parent: 3:eebf5a27f8ca
565 parent: 3:eebf5a27f8ca
566 user: test
566 user: test
567 date: Thu Jan 01 00:00:00 1970 +0000
567 date: Thu Jan 01 00:00:00 1970 +0000
568 summary: 0.3m
568 summary: 0.3m
569
569
570 changeset: 7:a6a34bfa0076
570 changeset: 7:a6a34bfa0076
571 user: test
571 user: test
572 date: Thu Jan 01 00:00:00 1970 +0000
572 date: Thu Jan 01 00:00:00 1970 +0000
573 summary: 1.3m
573 summary: 1.3m
574
574
575 $ rm -r full-clone
575 $ rm -r full-clone
576
576
577 When cloning from a non-copiable repository into '', do not
577 When cloning from a non-copiable repository into '', do not
578 recurse infinitely (issue2528)
578 recurse infinitely (issue2528)
579
579
580 $ hg clone full.hg ''
580 $ hg clone full.hg ''
581 abort: empty destination path is not valid
581 abort: empty destination path is not valid
582 [255]
582 [255]
583
583
584 test for https://bz.mercurial-scm.org/216
584 test for https://bz.mercurial-scm.org/216
585
585
586 Unbundle incremental bundles into fresh empty in one go
586 Unbundle incremental bundles into fresh empty in one go
587
587
588 $ rm -r empty
588 $ rm -r empty
589 $ hg init empty
589 $ hg init empty
590 $ hg -R test bundle --base null -r 0 ../0.hg
590 $ hg -R test bundle --base null -r 0 ../0.hg
591 1 changesets found
591 1 changesets found
592 $ hg -R test bundle --base 0 -r 1 ../1.hg
592 $ hg -R test bundle --base 0 -r 1 ../1.hg
593 1 changesets found
593 1 changesets found
594 $ hg -R empty unbundle -u ../0.hg ../1.hg
594 $ hg -R empty unbundle -u ../0.hg ../1.hg
595 adding changesets
595 adding changesets
596 adding manifests
596 adding manifests
597 adding file changes
597 adding file changes
598 added 1 changesets with 1 changes to 1 files
598 added 1 changesets with 1 changes to 1 files
599 new changesets f9ee2f85a263 (1 drafts)
599 new changesets f9ee2f85a263 (1 drafts)
600 adding changesets
600 adding changesets
601 adding manifests
601 adding manifests
602 adding file changes
602 adding file changes
603 added 1 changesets with 1 changes to 1 files
603 added 1 changesets with 1 changes to 1 files
604 new changesets 34c2bf6b0626 (1 drafts)
604 new changesets 34c2bf6b0626 (1 drafts)
605 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
605 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
606
606
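--base controls which changesets end up in each bundle: roughly, everything reachable from the requested revs that is not already reachable from the base revs, which is why 0.hg holds only rev 0 and 1.hg only rev 1. A small model of that selection (an illustration of the set arithmetic, not the actual implementation):

    def bundled_revs(ancestors, revs, bases):
        # ancestors(r) is assumed to return the set of ancestors of r,
        # excluding r itself.
        wanted = set()
        for r in revs:
            wanted |= ancestors(r) | {r}
        have = set()
        for b in bases:
            have |= ancestors(b) | {b}
        return wanted - have

    # With revs={1} and bases={0}: the ancestors of 1 minus the ancestors of 0
    # leave just revision 1, matching "1 changesets found" above.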
607 View full contents of the bundle
607 View full contents of the bundle
608 $ hg -R test bundle --base null -r 3 ../partial.hg
608 $ hg -R test bundle --base null -r 3 ../partial.hg
609 4 changesets found
609 4 changesets found
610 $ cd test
610 $ cd test
611 $ hg -R ../../partial.hg log -r "bundle()"
611 $ hg -R ../../partial.hg log -r "bundle()"
612 changeset: 0:f9ee2f85a263
612 changeset: 0:f9ee2f85a263
613 user: test
613 user: test
614 date: Thu Jan 01 00:00:00 1970 +0000
614 date: Thu Jan 01 00:00:00 1970 +0000
615 summary: 0.0
615 summary: 0.0
616
616
617 changeset: 1:34c2bf6b0626
617 changeset: 1:34c2bf6b0626
618 user: test
618 user: test
619 date: Thu Jan 01 00:00:00 1970 +0000
619 date: Thu Jan 01 00:00:00 1970 +0000
620 summary: 0.1
620 summary: 0.1
621
621
622 changeset: 2:e38ba6f5b7e0
622 changeset: 2:e38ba6f5b7e0
623 user: test
623 user: test
624 date: Thu Jan 01 00:00:00 1970 +0000
624 date: Thu Jan 01 00:00:00 1970 +0000
625 summary: 0.2
625 summary: 0.2
626
626
627 changeset: 3:eebf5a27f8ca
627 changeset: 3:eebf5a27f8ca
628 user: test
628 user: test
629 date: Thu Jan 01 00:00:00 1970 +0000
629 date: Thu Jan 01 00:00:00 1970 +0000
630 summary: 0.3
630 summary: 0.3
631
631
632 $ cd ..
632 $ cd ..
633
633
634 #endif
634 #endif
635
635
636 test for 540d1059c802
636 test for 540d1059c802
637
637
638 $ hg init orig
638 $ hg init orig
639 $ cd orig
639 $ cd orig
640 $ echo foo > foo
640 $ echo foo > foo
641 $ hg add foo
641 $ hg add foo
642 $ hg ci -m 'add foo'
642 $ hg ci -m 'add foo'
643
643
644 $ hg clone . ../copy
644 $ hg clone . ../copy
645 updating to branch default
645 updating to branch default
646 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
646 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
647 $ hg tag foo
647 $ hg tag foo
648
648
649 $ cd ../copy
649 $ cd ../copy
650 $ echo >> foo
650 $ echo >> foo
651 $ hg ci -m 'change foo'
651 $ hg ci -m 'change foo'
652 $ hg bundle ../bundle.hg ../orig
652 $ hg bundle ../bundle.hg ../orig
653 searching for changes
653 searching for changes
654 1 changesets found
654 1 changesets found
655
655
656 $ cd ..
656 $ cd ..
657
657
658 #if repobundlerepo
658 #if repobundlerepo
659 $ cd orig
659 $ cd orig
660 $ hg incoming ../bundle.hg
660 $ hg incoming ../bundle.hg
661 comparing with ../bundle.hg
661 comparing with ../bundle.hg
662 searching for changes
662 searching for changes
663 changeset: 2:ed1b79f46b9a
663 changeset: 2:ed1b79f46b9a
664 tag: tip
664 tag: tip
665 parent: 0:bbd179dfa0a7
665 parent: 0:bbd179dfa0a7
666 user: test
666 user: test
667 date: Thu Jan 01 00:00:00 1970 +0000
667 date: Thu Jan 01 00:00:00 1970 +0000
668 summary: change foo
668 summary: change foo
669
669
670 $ cd ..
670 $ cd ..
671
671
672 test bundle with # in the filename (issue2154):
672 test bundle with # in the filename (issue2154):
673
673
674 $ cp bundle.hg 'test#bundle.hg'
674 $ cp bundle.hg 'test#bundle.hg'
675 $ cd orig
675 $ cd orig
676 $ hg incoming '../test#bundle.hg'
676 $ hg incoming '../test#bundle.hg'
677 comparing with ../test
677 comparing with ../test
678 abort: unknown revision 'bundle.hg'!
678 abort: unknown revision 'bundle.hg'!
679 [255]
679 [255]
680
680
681 note that percent encoding is not handled:
681 note that percent encoding is not handled:
682
682
683 $ hg incoming ../test%23bundle.hg
683 $ hg incoming ../test%23bundle.hg
684 abort: repository ../test%23bundle.hg not found!
684 abort: repository ../test%23bundle.hg not found!
685 [255]
685 [255]
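The two aborts above follow from how hg splits a source path: anything after '#' is treated as a revision or branch name, and percent-escapes in local paths are not decoded. A tiny illustration of that splitting rule (a hypothetical helper, not Mercurial's actual URL parsing):

    def split_source(path):
        # Hypothetical helper mirroring the behaviour exercised above:
        # the fragment after '#' names a revision, and '%23' stays literal.
        if '#' in path:
            repopath, rev = path.rsplit('#', 1)
            return repopath, rev
        return path, None

    # split_source('../test#bundle.hg')   -> ('../test', 'bundle.hg')
    #   hence "comparing with ../test" and "unknown revision 'bundle.hg'".
    # split_source('../test%23bundle.hg') -> ('../test%23bundle.hg', None)
    #   hence "repository ../test%23bundle.hg not found".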
686 $ cd ..
686 $ cd ..
687
687
688 #endif
688 #endif
689
689
690 test bundling revisions on a newly created branch (issue3828):
690 test bundling revisions on a newly created branch (issue3828):
691
691
692 $ hg -q clone -U test test-clone
692 $ hg -q clone -U test test-clone
693 $ cd test
693 $ cd test
694
694
695 $ hg -q branch foo
695 $ hg -q branch foo
696 $ hg commit -m "create foo branch"
696 $ hg commit -m "create foo branch"
697 $ hg -q outgoing ../test-clone
697 $ hg -q outgoing ../test-clone
698 9:b4f5acb1ee27
698 9:b4f5acb1ee27
699 $ hg -q bundle --branch foo foo.hg ../test-clone
699 $ hg -q bundle --branch foo foo.hg ../test-clone
700 #if repobundlerepo
700 #if repobundlerepo
701 $ hg -R foo.hg -q log -r "bundle()"
701 $ hg -R foo.hg -q log -r "bundle()"
702 9:b4f5acb1ee27
702 9:b4f5acb1ee27
703 #endif
703 #endif
704
704
705 $ cd ..
705 $ cd ..
706
706
707 test for https://bz.mercurial-scm.org/1144
707 test for https://bz.mercurial-scm.org/1144
708
708
709 test that verify bundle does not traceback
709 test that verify bundle does not traceback
710
710
711 partial history bundle, fails w/ unknown parent
711 partial history bundle, fails w/ unknown parent
712
712
713 $ hg -R bundle.hg verify
713 $ hg -R bundle.hg verify
714 abort: 00changelog.i@bbd179dfa0a7: unknown parent!
714 abort: 00changelog.i@bbd179dfa0a7: unknown parent!
715 [255]
715 [255]
716
716
717 full history bundle, refuses to verify non-local repo
717 full history bundle, refuses to verify non-local repo
718
718
719 #if repobundlerepo
719 #if repobundlerepo
720 $ hg -R all.hg verify
720 $ hg -R all.hg verify
721 abort: cannot verify bundle or remote repos
721 abort: cannot verify bundle or remote repos
722 [255]
722 [255]
723 #endif
723 #endif
724
724
725 but, regular verify must continue to work
725 but, regular verify must continue to work
726
726
727 $ hg -R orig verify
727 $ hg -R orig verify
728 checking changesets
728 checking changesets
729 checking manifests
729 checking manifests
730 crosschecking files in changesets and manifests
730 crosschecking files in changesets and manifests
731 checking files
731 checking files
732 checked 2 changesets with 2 changes to 2 files
732 checked 2 changesets with 2 changes to 2 files
733
733
734 #if repobundlerepo
734 #if repobundlerepo
735 diff against bundle
735 diff against bundle
736
736
737 $ hg init b
737 $ hg init b
738 $ cd b
738 $ cd b
739 $ hg -R ../all.hg diff -r tip
739 $ hg -R ../all.hg diff -r tip
740 diff -r aa35859c02ea anotherfile
740 diff -r aa35859c02ea anotherfile
741 --- a/anotherfile Thu Jan 01 00:00:00 1970 +0000
741 --- a/anotherfile Thu Jan 01 00:00:00 1970 +0000
742 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
742 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
743 @@ -1,4 +0,0 @@
743 @@ -1,4 +0,0 @@
744 -0
744 -0
745 -1
745 -1
746 -2
746 -2
747 -3
747 -3
748 $ cd ..
748 $ cd ..
749 #endif
749 #endif
750
750
751 bundle single branch
751 bundle single branch
752
752
753 $ hg init branchy
753 $ hg init branchy
754 $ cd branchy
754 $ cd branchy
755 $ echo a >a
755 $ echo a >a
756 $ echo x >x
756 $ echo x >x
757 $ hg ci -Ama
757 $ hg ci -Ama
758 adding a
758 adding a
759 adding x
759 adding x
760 $ echo c >c
760 $ echo c >c
761 $ echo xx >x
761 $ echo xx >x
762 $ hg ci -Amc
762 $ hg ci -Amc
763 adding c
763 adding c
764 $ echo c1 >c1
764 $ echo c1 >c1
765 $ hg ci -Amc1
765 $ hg ci -Amc1
766 adding c1
766 adding c1
767 $ hg up 0
767 $ hg up 0
768 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
768 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
769 $ echo b >b
769 $ echo b >b
770 $ hg ci -Amb
770 $ hg ci -Amb
771 adding b
771 adding b
772 created new head
772 created new head
773 $ echo b1 >b1
773 $ echo b1 >b1
774 $ echo xx >x
774 $ echo xx >x
775 $ hg ci -Amb1
775 $ hg ci -Amb1
776 adding b1
776 adding b1
777 $ hg clone -q -r2 . part
777 $ hg clone -q -r2 . part
778
778
779 == bundling via incoming
779 == bundling via incoming
780
780
781 $ hg in -R part --bundle incoming.hg --template "{node}\n" .
781 $ hg in -R part --bundle incoming.hg --template "{node}\n" .
782 comparing with .
782 comparing with .
783 searching for changes
783 searching for changes
784 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
784 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
785 057f4db07f61970e1c11e83be79e9d08adc4dc31
785 057f4db07f61970e1c11e83be79e9d08adc4dc31
786
786
787 == bundling
787 == bundling
788
788
789 $ hg bundle bundle.hg part --debug --config progress.debug=true
789 $ hg bundle bundle.hg part --debug --config progress.debug=true
790 query 1; heads
790 query 1; heads
791 searching for changes
791 searching for changes
792 all remote heads known locally
792 all remote heads known locally
793 2 changesets found
793 2 changesets found
794 list of changesets:
794 list of changesets:
795 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
795 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a
796 057f4db07f61970e1c11e83be79e9d08adc4dc31
796 057f4db07f61970e1c11e83be79e9d08adc4dc31
797 bundle2-output-bundle: "HG20", (1 params) 2 parts total
797 bundle2-output-bundle: "HG20", (1 params) 2 parts total
798 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
798 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
799 changesets: 1/2 chunks (50.00%)
799 changesets: 1/2 chunks (50.00%)
800 changesets: 2/2 chunks (100.00%)
800 changesets: 2/2 chunks (100.00%)
801 manifests: 1/2 chunks (50.00%)
801 manifests: 1/2 chunks (50.00%)
802 manifests: 2/2 chunks (100.00%)
802 manifests: 2/2 chunks (100.00%)
803 files: b 1/3 files (33.33%)
803 files: b 1/3 files (33.33%)
804 files: b1 2/3 files (66.67%)
804 files: b1 2/3 files (66.67%)
805 files: x 3/3 files (100.00%)
805 files: x 3/3 files (100.00%)
806 bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
806 bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
807
807
808 #if repobundlerepo
808 #if repobundlerepo
809 == Test for issue3441
809 == Test for issue3441
810
810
811 $ hg clone -q -r0 . part2
811 $ hg clone -q -r0 . part2
812 $ hg -q -R part2 pull bundle.hg
812 $ hg -q -R part2 pull bundle.hg
813 $ hg -R part2 verify
813 $ hg -R part2 verify
814 checking changesets
814 checking changesets
815 checking manifests
815 checking manifests
816 crosschecking files in changesets and manifests
816 crosschecking files in changesets and manifests
817 checking files
817 checking files
818 checked 3 changesets with 5 changes to 4 files
818 checked 3 changesets with 5 changes to 4 files
819 #endif
819 #endif
820
820
821 == Test bundling no commits
821 == Test bundling no commits
822
822
823 $ hg bundle -r 'public()' no-output.hg
823 $ hg bundle -r 'public()' no-output.hg
824 abort: no commits to bundle
824 abort: no commits to bundle
825 [255]
825 [255]
826
826
827 $ cd ..
827 $ cd ..
828
828
829 When the user merges to a revision that exists only in the bundle,
829 When the user merges to a revision that exists only in the bundle,
830 it should show a warning that the second parent of the working
830 it should show a warning that the second parent of the working
831 directory does not exist
831 directory does not exist
832
832
833 $ hg init update2bundled
833 $ hg init update2bundled
834 $ cd update2bundled
834 $ cd update2bundled
835 $ cat <<EOF >> .hg/hgrc
835 $ cat <<EOF >> .hg/hgrc
836 > [extensions]
836 > [extensions]
837 > strip =
837 > strip =
838 > EOF
838 > EOF
839 $ echo "aaa" >> a
839 $ echo "aaa" >> a
840 $ hg commit -A -m 0
840 $ hg commit -A -m 0
841 adding a
841 adding a
842 $ echo "bbb" >> b
842 $ echo "bbb" >> b
843 $ hg commit -A -m 1
843 $ hg commit -A -m 1
844 adding b
844 adding b
845 $ echo "ccc" >> c
845 $ echo "ccc" >> c
846 $ hg commit -A -m 2
846 $ hg commit -A -m 2
847 adding c
847 adding c
848 $ hg update -r 1
848 $ hg update -r 1
849 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
849 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
850 $ echo "ddd" >> d
850 $ echo "ddd" >> d
851 $ hg commit -A -m 3
851 $ hg commit -A -m 3
852 adding d
852 adding d
853 created new head
853 created new head
854 $ hg update -r 2
854 $ hg update -r 2
855 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
855 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
856 $ hg log -G
856 $ hg log -G
857 o changeset: 3:8bd3e1f196af
857 o changeset: 3:8bd3e1f196af
858 | tag: tip
858 | tag: tip
859 | parent: 1:a01eca7af26d
859 | parent: 1:a01eca7af26d
860 | user: test
860 | user: test
861 | date: Thu Jan 01 00:00:00 1970 +0000
861 | date: Thu Jan 01 00:00:00 1970 +0000
862 | summary: 3
862 | summary: 3
863 |
863 |
864 | @ changeset: 2:4652c276ac4f
864 | @ changeset: 2:4652c276ac4f
865 |/ user: test
865 |/ user: test
866 | date: Thu Jan 01 00:00:00 1970 +0000
866 | date: Thu Jan 01 00:00:00 1970 +0000
867 | summary: 2
867 | summary: 2
868 |
868 |
869 o changeset: 1:a01eca7af26d
869 o changeset: 1:a01eca7af26d
870 | user: test
870 | user: test
871 | date: Thu Jan 01 00:00:00 1970 +0000
871 | date: Thu Jan 01 00:00:00 1970 +0000
872 | summary: 1
872 | summary: 1
873 |
873 |
874 o changeset: 0:4fe08cd4693e
874 o changeset: 0:4fe08cd4693e
875 user: test
875 user: test
876 date: Thu Jan 01 00:00:00 1970 +0000
876 date: Thu Jan 01 00:00:00 1970 +0000
877 summary: 0
877 summary: 0
878
878
879
879
880 #if repobundlerepo
880 #if repobundlerepo
881 $ hg bundle --base 1 -r 3 ../update2bundled.hg
881 $ hg bundle --base 1 -r 3 ../update2bundled.hg
882 1 changesets found
882 1 changesets found
883 $ hg strip -r 3
883 $ hg strip -r 3
884 saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg
884 saved backup bundle to $TESTTMP/update2bundled/.hg/strip-backup/8bd3e1f196af-017e56d8-backup.hg
885 $ hg merge -R ../update2bundled.hg -r 3
885 $ hg merge -R ../update2bundled.hg -r 3
886 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
886 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
887 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
887 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
888 (branch merge, don't forget to commit)
888 (branch merge, don't forget to commit)
889
889
890 When the user updates to a revision that exists only in the bundle,
890 When the user updates to a revision that exists only in the bundle,
891 it should show a warning
891 it should show a warning
892
892
893 $ hg update -R ../update2bundled.hg --clean -r 3
893 $ hg update -R ../update2bundled.hg --clean -r 3
894 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
894 setting parent to node 8bd3e1f196af289b2b121be08031e76d7ae92098 that only exists in the bundle
895 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
895 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
896
896
897 When the user updates to a revision that exists in the local repository,
897 When the user updates to a revision that exists in the local repository,
898 the warning shouldn't be emitted
898 the warning shouldn't be emitted
899
899
900 $ hg update -R ../update2bundled.hg -r 0
900 $ hg update -R ../update2bundled.hg -r 0
901 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
901 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
902 #endif
902 #endif
903
903
904 Test the option that creates a slim bundle
904 Test the option that creates a slim bundle
905
905
906 $ hg bundle -a --config devel.bundle.delta=p1 ./slim.hg
906 $ hg bundle -a --config devel.bundle.delta=p1 ./slim.hg
907 3 changesets found
907 3 changesets found
908
909 Test the option that creates a bundle with full snapshots only (no deltas)
910 $ hg bundle -a --config devel.bundle.delta=full ./full.hg
911 3 changesets found
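devel.bundle.delta controls how revisions are stored inside the generated changegroup: 'p1' is meant to force every delta to be computed against the first parent (the "slim" bundle above), while 'full' stores each revision as a full snapshot with no deltas at all, which is the behaviour exercised by the new test above. A hedged sketch of how a bundle writer might branch on that knob (the mode names are illustrative, not the actual changegroup API):

    def deltamode_from_config(ui):
        # Illustrative mapping only; the option itself is a real config knob,
        # but the returned mode names here are made up for the example.
        mode = ui.config(b'devel', b'bundle.delta', b'')
        if mode == b'p1':
            return 'delta-against-first-parent'   # smaller, still delta-based
        if mode == b'full':
            return 'full-snapshots-only'          # larger, no delta chains
        return 'default-delta-selection'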