##// END OF EJS Templates
exchange: perform stream clone with clone bundle with --uncompressed...
Gregory Szorc -
r34360:ff406f3e default
parent child Browse files
Show More
@@ -1,2060 +1,2076 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 hex,
16 hex,
17 nullid,
17 nullid,
18 )
18 )
19 from . import (
19 from . import (
20 bookmarks as bookmod,
20 bookmarks as bookmod,
21 bundle2,
21 bundle2,
22 changegroup,
22 changegroup,
23 discovery,
23 discovery,
24 error,
24 error,
25 lock as lockmod,
25 lock as lockmod,
26 obsolete,
26 obsolete,
27 phases,
27 phases,
28 pushkey,
28 pushkey,
29 pycompat,
29 pycompat,
30 scmutil,
30 scmutil,
31 sslutil,
31 sslutil,
32 streamclone,
32 streamclone,
33 url as urlmod,
33 url as urlmod,
34 util,
34 util,
35 )
35 )
36
36
# Convenience aliases: re-export util's urllib compatibility shims so code
# in this module (and its callers) can reach them directly.
urlerr = util.urlerr
urlreq = util.urlreq
39
39
40 # Maps bundle version human names to changegroup versions.
40 # Maps bundle version human names to changegroup versions.
41 _bundlespeccgversions = {'v1': '01',
41 _bundlespeccgversions = {'v1': '01',
42 'v2': '02',
42 'v2': '02',
43 'packed1': 's1',
43 'packed1': 's1',
44 'bundle2': '02', #legacy
44 'bundle2': '02', #legacy
45 }
45 }
46
46
47 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
47 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
48 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
48 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
49
49
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>[;k=v[;k=v]]" into the version and a dict of
        # URI-decoded parameters.
        if ';' not in s:
            return s, {}

        version, paramstr = s.split(';', 1)

        params = {}
        for pair in paramstr.split(';'):
            if '=' not in pair:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % pair)

            key, value = pair.split('=', 1)
            params[urlreq.unquote(key)] = urlreq.unquote(value)

        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-specified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            compression = 'none' if spec == 'packed1' else 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Translate the human-centric names into their internal forms.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
171
171
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the header of *fh* and return the matching unbundler object.

    Supports changegroup-1 ("HG10"), bundle2 ("HG2x") and stream clone
    ("HGS1") payloads; aborts on anything else. *fname* is only used for
    error messages (``vfs.join`` is applied when *vfs* is given).
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A headerless stream starting with a NUL byte is assumed to be an
        # uncompressed cg1 payload; fix it up to look like "HG10UN".
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)

    if version == '10':
        # cg1: the 2-byte compression marker follows the magic unless we
        # already fixed one up above.
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
199
199
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal compression type to its bundlespec name, or None
        # when the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    unbundler = readbundle(ui, fh, None)
    if isinstance(unbundler, changegroup.cg1unpacker):
        alg = unbundler._type
        if alg == '_truncatedBZ':
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(unbundler, bundle2.unbundle20):
        if 'Compression' in unbundler.params:
            comp = speccompression(unbundler.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        # Derive the spec version from the changegroup part, if any.
        version = None
        for part in unbundler.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(unbundler, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % unbundler)
252
252
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if not common:
        # Nothing known in common: everything from the null revision up.
        common = [nullid]
    else:
        # Drop common nodes the local changelog does not actually have.
        hasnode = cl.hasnode
        common = [n for n in common if hasnode(n)]
    if not heads:
        heads = cl.heads()
    return discovery.outgoing(repo, common, heads)
271
271
272 def _forcebundle1(op):
272 def _forcebundle1(op):
273 """return true if a pull/push must use bundle1
273 """return true if a pull/push must use bundle1
274
274
275 This function is used to allow testing of the older bundle version"""
275 This function is used to allow testing of the older bundle version"""
276 ui = op.repo.ui
276 ui = op.repo.ui
277 forcebundle1 = False
277 forcebundle1 = False
278 # The goal is this config is to allow developer to choose the bundle
278 # The goal is this config is to allow developer to choose the bundle
279 # version used during exchanged. This is especially handy during test.
279 # version used during exchanged. This is especially handy during test.
280 # Value is a list of bundle version to be picked from, highest version
280 # Value is a list of bundle version to be picked from, highest version
281 # should be used.
281 # should be used.
282 #
282 #
283 # developer config: devel.legacy.exchange
283 # developer config: devel.legacy.exchange
284 exchange = ui.configlist('devel', 'legacy.exchange')
284 exchange = ui.configlist('devel', 'legacy.exchange')
285 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
285 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
286 return forcebundle1 or not op.remote.capable('bundle2')
286 return forcebundle1 or not op.remote.capable('bundle2')
287
287
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # steps already performed; used to check what has already been
        # handled through bundle2
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no explicit push target: every common head stays relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # We want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nodemap = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nodemap[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(ctx.node() for ctx in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        return self.fallbackheads

    # mapping of message used when pushing bookmark
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
412
412
413
413
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))

    # A local destination must support every feature the source requires.
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    # nullcontextmanager stands in for any lock/transaction we could not
    # (or did not need to) take above.
    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
478
478
# List of names of discovery steps to perform before push, in execution
# order. Populated by the ``pushdiscovery`` decorator below.
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
486
486
def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""
    def dec(func):
        # each step name may be registered at most once
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return dec
502
502
def _pushdiscovery(pushop):
    """Run every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
508
508
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    # What do we have in common with the remote, and what are its heads?
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    # Derive the outgoing set from that common information.
    pushop.outgoing = discovery.findcommonoutgoing(pushop.repo, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
521
521
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and publishing):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    ana = phases.analyzeremotephases(pushop.repo,
                                     pushop.fallbackheads,
                                     remotephases)
    pheads, droots = ana
    extracond = ''
    if not publishing:
        # non-publishing server: only consider changesets public locally
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    # heads to turn public if the push succeeds / if it fails
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
570
570
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """discover the obsolescence markers relevant to the push

    Only runs when marker exchange is enabled locally, the local repo has
    markers, and the remote advertises the 'obsolete' namespace. Result is
    stored in ``pushop.outobsmarkers``."""
    if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
        and pushop.repo.obsstore
        and 'obsolete' in pushop.remote.listkeys('namespaces')):
        repo = pushop.repo
        # very naive computation, that can be quite expensive on big repo.
        # However: evolution is currently slow on them anyway.
        nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
        pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
581
581
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """discover the bookmark updates to send to the remote

    Fills ``pushop.outbookmarks`` with (name, old-remote-hex, new-hex)
    triples and sets ``pushop.bkresult`` to 2 when an explicitly requested
    bookmark exists on neither side."""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict bookmark moves to ancestors of the pushed revs
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks explicitly requested on the command line (-B)
    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # hex() chokes on None (missing bookmark on one side)
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        # anything left in 'explicit' was requested but never matched
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
646
646
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before pushing

    Returns False when there is nothing to push. Aborts (unless --force)
    when the outgoing set contains obsolete or unstable changesets, then
    delegates head checking to ``discovery.checkheads``."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
681
681
# List of names of steps to perform for an outgoing bundle2, order matters.
# Populated by the ``b2partsgenerator`` decorator below.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
689
689
def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, adjust the b2partsgenmapping dictionary directly."""
    def dec(func):
        # each step name may be registered at most once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            # idx lets a generator be inserted at a specific position
            # (part order in the bundle matters)
            b2partsgenorder.insert(idx, stepname)
        return func
    return dec
708
708
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # legacy check: abort server-side if remote heads changed at all
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # finer check: only guard the remote heads this push actually
            # replaces or removes
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
732
732
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        # negotiate the highest changegroup version both sides support
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
772
772
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Emits one ``pushkey`` part per head that must be turned public on the
    remote, and returns a reply handler that surfaces per-part results as
    warnings. Failure callbacks are registered in ``pushop.pkfailcb``.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # idiomatic membership test, consistent with _pushb2bookmarks
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    # (part id, node) pairs so replies/failures can be mapped back
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
813
813
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsmarkers part when both sides can exchange markers"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no obsmarkers format shared with the remote
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
825
825
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    # (part id, bookmark name, action) triples to interpret replies/failures
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty 'old' means creation, empty 'new' means deletion
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply
877
877
@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    pushvars = pushop.pushvars
    if not pushvars:
        return

    shellvars = {}
    for raw in pushvars:
        # each entry must be KEY=VALUE (VALUE may be empty)
        if '=' not in raw:
            msg = ("unable to parse variable '%s', should follow "
                   "'KEY=VALUE' or 'KEY=' format")
            raise error.Abort(msg % raw)
        name, value = raw.split('=', 1)
        shellvars[name] = value

    part = bundler.newpart('pushvars')
    for name, value in shellvars.iteritems():
        part.addparam(name, value, mandatory=False)
896
896
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # let every registered part generator contribute; generators may return
    # a callable to process the server's reply for their part
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        # delegate pushkey failures to the callback registered by the
        # part generator that created the failing part
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
945
945
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo.

    Legacy (non-bundle2) code path: builds a version '01' changegroup for
    ``pushop.outgoing`` and sends it via the remote's ``unbundle`` command.
    Stores the remote's return value in ``pushop.cgresult``. No-op if the
    'changesets' step was already performed.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = ['force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())

def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Pulls the remote's phase data, applies it locally (moving local
    changesets to public/draft as appropriate), then pushes any locally
    public heads the remote still considers draft via the legacy pushkey
    protocol. Skipped in part when bundle2 already handled phases.
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)

def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo

    Requires an active transaction manager on ``pushop``; without one the
    repo is not locked, so instead of moving phases we only warn the user
    about the moves that would have happened.
    """
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
    else:
        # repo is not locked, do not change any phases!
        # Informs the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)

def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote

    Legacy pushkey transport: markers are escaped into one or more
    'obsolete' pushkey payloads and sent individually. Warns (does not
    abort) if any payload is rejected. No-op if the step is already done.
    """
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug('try to push obsolete markers to remote\n')
        rslts = []
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)

def _pushbookmark(pushop):
    """Update bookmark position on remote

    Sends one 'bookmarks' pushkey per outgoing bookmark, choosing the
    user-facing message from ``bookmsgmap`` based on whether the bookmark
    is being exported (no old value), deleted (no new value) or updated.
    Skipped when the changegroup push failed or the step is already done.
    """
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value form invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1

class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # bundle2 is used unless something forces us back to bundle1
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # bundle2 capabilities advertised by the remote peer
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()

class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""

    def __init__(self, repo, source, url):
        # repo the transaction applies to
        self.repo = repo
        # operation name, exposed to hooks as 'source'
        self.source = source
        # remote url, exposed to hooks as 'url'
        self.url = url
        # lazily-created transaction (None until first use)
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()

def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)

    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        # Clone bundles are tried first so a stream clone bundle can satisfy
        # a stream clone request before we fall back to the legacy stream
        # clone protocol.
        _maybeapplyclonebundle(pullop)
        streamclone.maybeperformlegacystreamclone(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop

# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""
    def dec(func):
        # each step name may only be registered once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return dec

def _pulldiscovery(pullop):
    """Run all discovery steps

    Steps run in registration order (``pulldiscoveryorder``); each one
    receives the ``pulloperation`` being processed.
    """
    for stepname in pulldiscoveryorder:
        step = pulldiscoverymapping[stepname]
        step(pullop)

@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')


@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote heads is filtered locally, put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
        if set(rheads).issubset(set(common)):
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads

1337 def _pullbundle2(pullop):
1337 def _pullbundle2(pullop):
1338 """pull data using bundle2
1338 """pull data using bundle2
1339
1339
1340 For now, the only supported data are changegroup."""
1340 For now, the only supported data are changegroup."""
1341 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1341 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1342
1342
1343 # At the moment we don't do stream clones over bundle2. If that is
1343 # At the moment we don't do stream clones over bundle2. If that is
1344 # implemented then here's where the check for that will go.
1344 # implemented then here's where the check for that will go.
1345 streaming = False
1345 streaming = False
1346
1346
1347 # pulling changegroup
1347 # pulling changegroup
1348 pullop.stepsdone.add('changegroup')
1348 pullop.stepsdone.add('changegroup')
1349
1349
1350 kwargs['common'] = pullop.common
1350 kwargs['common'] = pullop.common
1351 kwargs['heads'] = pullop.heads or pullop.rheads
1351 kwargs['heads'] = pullop.heads or pullop.rheads
1352 kwargs['cg'] = pullop.fetch
1352 kwargs['cg'] = pullop.fetch
1353
1353
1354 ui = pullop.repo.ui
1354 ui = pullop.repo.ui
1355 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1355 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1356 if (not legacyphase and 'heads' in pullop.remotebundle2caps.get('phases')):
1356 if (not legacyphase and 'heads' in pullop.remotebundle2caps.get('phases')):
1357 kwargs['phases'] = True
1357 kwargs['phases'] = True
1358 pullop.stepsdone.add('phases')
1358 pullop.stepsdone.add('phases')
1359
1359
1360 if 'listkeys' in pullop.remotebundle2caps:
1360 if 'listkeys' in pullop.remotebundle2caps:
1361 if 'phases' not in pullop.stepsdone:
1361 if 'phases' not in pullop.stepsdone:
1362 kwargs['listkeys'] = ['phases']
1362 kwargs['listkeys'] = ['phases']
1363 if pullop.remotebookmarks is None:
1363 if pullop.remotebookmarks is None:
1364 # make sure to always includes bookmark data when migrating
1364 # make sure to always includes bookmark data when migrating
1365 # `hg incoming --bundle` to using this function.
1365 # `hg incoming --bundle` to using this function.
1366 kwargs.setdefault('listkeys', []).append('bookmarks')
1366 kwargs.setdefault('listkeys', []).append('bookmarks')
1367
1367
1368 # If this is a full pull / clone and the server supports the clone bundles
1368 # If this is a full pull / clone and the server supports the clone bundles
1369 # feature, tell the server whether we attempted a clone bundle. The
1369 # feature, tell the server whether we attempted a clone bundle. The
1370 # presence of this flag indicates the client supports clone bundles. This
1370 # presence of this flag indicates the client supports clone bundles. This
1371 # will enable the server to treat clients that support clone bundles
1371 # will enable the server to treat clients that support clone bundles
1372 # differently from those that don't.
1372 # differently from those that don't.
1373 if (pullop.remote.capable('clonebundles')
1373 if (pullop.remote.capable('clonebundles')
1374 and pullop.heads is None and list(pullop.common) == [nullid]):
1374 and pullop.heads is None and list(pullop.common) == [nullid]):
1375 kwargs['cbattempted'] = pullop.clonebundleattempted
1375 kwargs['cbattempted'] = pullop.clonebundleattempted
1376
1376
1377 if streaming:
1377 if streaming:
1378 pullop.repo.ui.status(_('streaming all changes\n'))
1378 pullop.repo.ui.status(_('streaming all changes\n'))
1379 elif not pullop.fetch:
1379 elif not pullop.fetch:
1380 pullop.repo.ui.status(_("no changes found\n"))
1380 pullop.repo.ui.status(_("no changes found\n"))
1381 pullop.cgresult = 0
1381 pullop.cgresult = 0
1382 else:
1382 else:
1383 if pullop.heads is None and list(pullop.common) == [nullid]:
1383 if pullop.heads is None and list(pullop.common) == [nullid]:
1384 pullop.repo.ui.status(_("requesting all changes\n"))
1384 pullop.repo.ui.status(_("requesting all changes\n"))
1385 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1385 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1386 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1386 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1387 if obsolete.commonversion(remoteversions) is not None:
1387 if obsolete.commonversion(remoteversions) is not None:
1388 kwargs['obsmarkers'] = True
1388 kwargs['obsmarkers'] = True
1389 pullop.stepsdone.add('obsmarkers')
1389 pullop.stepsdone.add('obsmarkers')
1390 _pullbundle2extraprepare(pullop, kwargs)
1390 _pullbundle2extraprepare(pullop, kwargs)
1391 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1391 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1392 try:
1392 try:
1393 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1393 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1394 except bundle2.AbortFromPart as exc:
1394 except bundle2.AbortFromPart as exc:
1395 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1395 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1396 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1396 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1397 except error.BundleValueError as exc:
1397 except error.BundleValueError as exc:
1398 raise error.Abort(_('missing support for %s') % exc)
1398 raise error.Abort(_('missing support for %s') % exc)
1399
1399
1400 if pullop.fetch:
1400 if pullop.fetch:
1401 pullop.cgresult = bundle2.combinechangegroupresults(op)
1401 pullop.cgresult = bundle2.combinechangegroupresults(op)
1402
1402
1403 # processing phases change
1403 # processing phases change
1404 for namespace, value in op.records['listkeys']:
1404 for namespace, value in op.records['listkeys']:
1405 if namespace == 'phases':
1405 if namespace == 'phases':
1406 _pullapplyphases(pullop, value)
1406 _pullapplyphases(pullop, value)
1407
1407
1408 # processing bookmark update
1408 # processing bookmark update
1409 for namespace, value in op.records['listkeys']:
1409 for namespace, value in op.records['listkeys']:
1410 if namespace == 'bookmarks':
1410 if namespace == 'bookmarks':
1411 pullop.remotebookmarks = value
1411 pullop.remotebookmarks = value
1412
1412
1413 # bookmark data were either already there or pulled in the bundle
1413 # bookmark data were either already there or pulled in the bundle
1414 if pullop.remotebookmarks is not None:
1414 if pullop.remotebookmarks is not None:
1415 _pullbookmarks(pullop)
1415 _pullbookmarks(pullop)
1416
1416
1417 def _pullbundle2extraprepare(pullop, kwargs):
1417 def _pullbundle2extraprepare(pullop, kwargs):
1418 """hook function so that extensions can extend the getbundle call"""
1418 """hook function so that extensions can extend the getbundle call"""
1419 pass
1419 pass
1420
1420
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Chooses the most capable wire-protocol method available on the remote
    (``getbundle`` > ``changegroupsubset`` > ``changegroup``), applies the
    resulting bundle inside the pull transaction and records the changegroup
    result on ``pullop.cgresult``.  No-op when the 'changegroup' step was
    already performed (e.g. by the bundle2 code path).
    """
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        # discovery found nothing to pull; report and record success (0)
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        # full clone: nothing in common but the null revision
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        # legacy peer without getbundle; pull everything it has
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1455
1455
def _pullphase(pullop):
    """Fetch the remote phase information via listkeys and apply it locally.

    Skipped entirely when the 'phases' step was already handled (typically
    inside the bundle2 exchange).
    """
    if 'phases' not in pullop.stepsdone:
        _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1462
1462
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state

    ``remotephases`` is the pushkey 'phases' namespace as returned by the
    remote (a dict of hex node -> phase, plus an optional 'publishing'
    flag).  Advances local public/draft phase boundaries accordingly; phases
    only ever move forward (towards public) here.
    """
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    # operate on the unfiltered repo so hidden changesets get phases too
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1497
1497
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one

    Decodes the hex-encoded remote bookmark listing and merges it into the
    local bookmark store, honouring any bookmarks the user explicitly asked
    for.  Runs at most once per pull operation.
    """
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    remotebookmarks = bookmod.unhexlifybookmarks(pullop.remotebookmarks)
    bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1510
1510
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        # markers are exposed through the pushkey 'obsolete' namespace as
        # base85-encoded chunks under keys 'dump0', 'dump1', ...
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            # obsolescence state changed; drop cached computed sets
            pullop.repo.invalidatevolatilesets()
    return tr
1538
1538
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1545
1545
# List of names of steps to perform for a bundle2 for getbundle, order matters.
# Populated by the @getbundle2partsgenerator decorator below.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
1553
1553
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is recorded in the step -> function mapping and
    inserted into the ordered step list: appended when ``idx`` is None,
    otherwise inserted at position ``idx``.  Beware that decorated functions
    will be added in definition order (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""
    def register(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is not None:
            getbundle2partsorder.insert(idx, stepname)
        else:
            getbundle2partsorder.append(stepname)
        return func
    return register
1572
1572
def bundle2requested(bundlecaps):
    """Return True when the advertised caps request a bundle2 ('HG2x') bundle."""
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1577
1577
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        # bundle1 can only carry a changegroup; any other request argument
        # is unsupported on this legacy path
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        return changegroup.makestream(repo, outgoing, '01', source,
                                      bundlecaps=bundlecaps)

    # bundle20 case
    # decode the client's bundle2 capabilities from the 'bundle2=' cap blob
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # run every registered part generator, in registration order
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    return bundler.getchunks()
1618
1618
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle

    Negotiates the highest changegroup version supported by both sides
    (falling back to '01' when the client advertises none) and adds a
    'changegroup' part only when there are outgoing changesets.
    """
    cgstream = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            cgversions = [v for v in cgversions
                          if v in changegroup.supportedoutgoingversions(repo)]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            # prefer the newest version both sides understand
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        if outgoing.missing:
            cgstream = changegroup.makestream(repo, outgoing, version, source,
                                              bundlecaps=bundlecaps)

    if cgstream:
        part = bundler.newpart('changegroup', data=cgstream)
        if cgversions:
            part.addparam('version', version)
        # advisory change count, used by clients for progress reporting
        part.addparam('nbchanges', '%d' % len(outgoing.missing),
                      mandatory=False)
        if 'treemanifest' in repo.requirements:
            part.addparam('treemanifest', '1')
1647
1647
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1658
1658
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    subset = [c.node() for c in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1670
1670
@getbundle2partsgenerator('phases')
def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, **kwargs):
    """add phase heads part to the requested bundle

    Emits a binary 'phase-heads' part describing, for each phase, the heads
    the client should consider in that phase.  Only public and draft are
    handled.  Raises ValueError when the client did not advertise the
    'heads' phases exchange method.
    """
    if kwargs.get('phases', False):
        # Default to () so a peer that does not advertise the 'phases'
        # capability raises the intended ValueError instead of a TypeError
        # from `'heads' in None`.
        if 'heads' not in b2caps.get('phases', ()):
            raise ValueError(_('no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            # a publishing repository exposes everything as public
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phase for now)
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = 'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        phasemapping = []
        for phase in phases.allphases:
            phasemapping.append(sorted(headsbyphase[phase]))

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart('phase-heads', data=phasedata)
1717
1717
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Send only when changesets are being exchanged AND the client
    # advertised support for the cache part.
    if kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps:
        outgoing = _computeoutgoing(repo, heads, common)
        bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1737
1737
def _getbookmarks(repo, **kwargs):
    """Returns bookmark to node mapping.

    This function is primarily used to generate `bookmarks` bundle2 part.
    It is a separate function in order to make it easy to wrap it
    in extensions. Passing `kwargs` to the function makes it easy to
    add new parameters in extensions.
    """
    return {book: node for book, node in bookmod.listbinbookmarks(repo)}
1748
1748
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.

    ``their_heads`` is one of: the literal ['force'] (skip the check), the
    exact list of binary heads the client saw, or ['hashed', <digest>] where
    the digest is sha1 over the sorted, concatenated binary heads.  Raises
    PushRaced when none of these match the current state.
    """
    heads = repo.heads()
    # mirror the client-side computation of the 'hashed' argument
    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1762
1762
1763 def unbundle(repo, cg, heads, source, url):
1763 def unbundle(repo, cg, heads, source, url):
1764 """Apply a bundle to a repo.
1764 """Apply a bundle to a repo.
1765
1765
1766 this function makes sure the repo is locked during the application and have
1766 this function makes sure the repo is locked during the application and have
1767 mechanism to check that no push race occurred between the creation of the
1767 mechanism to check that no push race occurred between the creation of the
1768 bundle and its application.
1768 bundle and its application.
1769
1769
1770 If the push was raced as PushRaced exception is raised."""
1770 If the push was raced as PushRaced exception is raised."""
1771 r = 0
1771 r = 0
1772 # need a transaction when processing a bundle2 stream
1772 # need a transaction when processing a bundle2 stream
1773 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1773 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1774 lockandtr = [None, None, None]
1774 lockandtr = [None, None, None]
1775 recordout = None
1775 recordout = None
1776 # quick fix for output mismatch with bundle2 in 3.4
1776 # quick fix for output mismatch with bundle2 in 3.4
1777 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
1777 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
1778 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1778 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1779 captureoutput = True
1779 captureoutput = True
1780 try:
1780 try:
1781 # note: outside bundle1, 'heads' is expected to be empty and this
1781 # note: outside bundle1, 'heads' is expected to be empty and this
1782 # 'check_heads' call wil be a no-op
1782 # 'check_heads' call wil be a no-op
1783 check_heads(repo, heads, 'uploading changes')
1783 check_heads(repo, heads, 'uploading changes')
1784 # push can proceed
1784 # push can proceed
1785 if not isinstance(cg, bundle2.unbundle20):
1785 if not isinstance(cg, bundle2.unbundle20):
1786 # legacy case: bundle1 (changegroup 01)
1786 # legacy case: bundle1 (changegroup 01)
1787 txnname = "\n".join([source, util.hidepassword(url)])
1787 txnname = "\n".join([source, util.hidepassword(url)])
1788 with repo.lock(), repo.transaction(txnname) as tr:
1788 with repo.lock(), repo.transaction(txnname) as tr:
1789 op = bundle2.applybundle(repo, cg, tr, source, url)
1789 op = bundle2.applybundle(repo, cg, tr, source, url)
1790 r = bundle2.combinechangegroupresults(op)
1790 r = bundle2.combinechangegroupresults(op)
1791 else:
1791 else:
1792 r = None
1792 r = None
1793 try:
1793 try:
1794 def gettransaction():
1794 def gettransaction():
1795 if not lockandtr[2]:
1795 if not lockandtr[2]:
1796 lockandtr[0] = repo.wlock()
1796 lockandtr[0] = repo.wlock()
1797 lockandtr[1] = repo.lock()
1797 lockandtr[1] = repo.lock()
1798 lockandtr[2] = repo.transaction(source)
1798 lockandtr[2] = repo.transaction(source)
1799 lockandtr[2].hookargs['source'] = source
1799 lockandtr[2].hookargs['source'] = source
1800 lockandtr[2].hookargs['url'] = url
1800 lockandtr[2].hookargs['url'] = url
1801 lockandtr[2].hookargs['bundle2'] = '1'
1801 lockandtr[2].hookargs['bundle2'] = '1'
1802 return lockandtr[2]
1802 return lockandtr[2]
1803
1803
1804 # Do greedy locking by default until we're satisfied with lazy
1804 # Do greedy locking by default until we're satisfied with lazy
1805 # locking.
1805 # locking.
1806 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1806 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1807 gettransaction()
1807 gettransaction()
1808
1808
1809 op = bundle2.bundleoperation(repo, gettransaction,
1809 op = bundle2.bundleoperation(repo, gettransaction,
1810 captureoutput=captureoutput)
1810 captureoutput=captureoutput)
1811 try:
1811 try:
1812 op = bundle2.processbundle(repo, cg, op=op)
1812 op = bundle2.processbundle(repo, cg, op=op)
1813 finally:
1813 finally:
1814 r = op.reply
1814 r = op.reply
1815 if captureoutput and r is not None:
1815 if captureoutput and r is not None:
1816 repo.ui.pushbuffer(error=True, subproc=True)
1816 repo.ui.pushbuffer(error=True, subproc=True)
1817 def recordout(output):
1817 def recordout(output):
1818 r.newpart('output', data=output, mandatory=False)
1818 r.newpart('output', data=output, mandatory=False)
1819 if lockandtr[2] is not None:
1819 if lockandtr[2] is not None:
1820 lockandtr[2].close()
1820 lockandtr[2].close()
1821 except BaseException as exc:
1821 except BaseException as exc:
1822 exc.duringunbundle2 = True
1822 exc.duringunbundle2 = True
1823 if captureoutput and r is not None:
1823 if captureoutput and r is not None:
1824 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1824 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1825 def recordout(output):
1825 def recordout(output):
1826 part = bundle2.bundlepart('output', data=output,
1826 part = bundle2.bundlepart('output', data=output,
1827 mandatory=False)
1827 mandatory=False)
1828 parts.append(part)
1828 parts.append(part)
1829 raise
1829 raise
1830 finally:
1830 finally:
1831 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1831 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1832 if recordout is not None:
1832 if recordout is not None:
1833 recordout(repo.ui.popbuffer())
1833 recordout(repo.ui.popbuffer())
1834 return r
1834 return r
1835
1835
1836 def _maybeapplyclonebundle(pullop):
1836 def _maybeapplyclonebundle(pullop):
1837 """Apply a clone bundle from a remote, if possible."""
1837 """Apply a clone bundle from a remote, if possible."""
1838
1838
1839 repo = pullop.repo
1839 repo = pullop.repo
1840 remote = pullop.remote
1840 remote = pullop.remote
1841
1841
1842 if not repo.ui.configbool('ui', 'clonebundles'):
1842 if not repo.ui.configbool('ui', 'clonebundles'):
1843 return
1843 return
1844
1844
1845 # Only run if local repo is empty.
1845 # Only run if local repo is empty.
1846 if len(repo):
1846 if len(repo):
1847 return
1847 return
1848
1848
1849 if pullop.heads:
1849 if pullop.heads:
1850 return
1850 return
1851
1851
1852 if not remote.capable('clonebundles'):
1852 if not remote.capable('clonebundles'):
1853 return
1853 return
1854
1854
1855 res = remote._call('clonebundles')
1855 res = remote._call('clonebundles')
1856
1856
1857 # If we call the wire protocol command, that's good enough to record the
1857 # If we call the wire protocol command, that's good enough to record the
1858 # attempt.
1858 # attempt.
1859 pullop.clonebundleattempted = True
1859 pullop.clonebundleattempted = True
1860
1860
1861 entries = parseclonebundlesmanifest(repo, res)
1861 entries = parseclonebundlesmanifest(repo, res)
1862 if not entries:
1862 if not entries:
1863 repo.ui.note(_('no clone bundles available on remote; '
1863 repo.ui.note(_('no clone bundles available on remote; '
1864 'falling back to regular clone\n'))
1864 'falling back to regular clone\n'))
1865 return
1865 return
1866
1866
1867 entries = filterclonebundleentries(repo, entries)
1867 entries = filterclonebundleentries(
1868 repo, entries, streamclonerequested=pullop.streamclonerequested)
1869
1868 if not entries:
1870 if not entries:
1869 # There is a thundering herd concern here. However, if a server
1871 # There is a thundering herd concern here. However, if a server
1870 # operator doesn't advertise bundles appropriate for its clients,
1872 # operator doesn't advertise bundles appropriate for its clients,
1871 # they deserve what's coming. Furthermore, from a client's
1873 # they deserve what's coming. Furthermore, from a client's
1872 # perspective, no automatic fallback would mean not being able to
1874 # perspective, no automatic fallback would mean not being able to
1873 # clone!
1875 # clone!
1874 repo.ui.warn(_('no compatible clone bundles available on server; '
1876 repo.ui.warn(_('no compatible clone bundles available on server; '
1875 'falling back to regular clone\n'))
1877 'falling back to regular clone\n'))
1876 repo.ui.warn(_('(you may want to report this to the server '
1878 repo.ui.warn(_('(you may want to report this to the server '
1877 'operator)\n'))
1879 'operator)\n'))
1878 return
1880 return
1879
1881
1880 entries = sortclonebundleentries(repo.ui, entries)
1882 entries = sortclonebundleentries(repo.ui, entries)
1881
1883
1882 url = entries[0]['URL']
1884 url = entries[0]['URL']
1883 repo.ui.status(_('applying clone bundle from %s\n') % url)
1885 repo.ui.status(_('applying clone bundle from %s\n') % url)
1884 if trypullbundlefromurl(repo.ui, repo, url):
1886 if trypullbundlefromurl(repo.ui, repo, url):
1885 repo.ui.status(_('finished applying clone bundle\n'))
1887 repo.ui.status(_('finished applying clone bundle\n'))
1886 # Bundle failed.
1888 # Bundle failed.
1887 #
1889 #
1888 # We abort by default to avoid the thundering herd of
1890 # We abort by default to avoid the thundering herd of
1889 # clients flooding a server that was expecting expensive
1891 # clients flooding a server that was expecting expensive
1890 # clone load to be offloaded.
1892 # clone load to be offloaded.
1891 elif repo.ui.configbool('ui', 'clonebundlefallback'):
1893 elif repo.ui.configbool('ui', 'clonebundlefallback'):
1892 repo.ui.warn(_('falling back to normal clone\n'))
1894 repo.ui.warn(_('falling back to normal clone\n'))
1893 else:
1895 else:
1894 raise error.Abort(_('error applying bundle'),
1896 raise error.Abort(_('error applying bundle'),
1895 hint=_('if this error persists, consider contacting '
1897 hint=_('if this error persists, consider contacting '
1896 'the server operator or disable clone '
1898 'the server operator or disable clone '
1897 'bundles via '
1899 'bundles via '
1898 '"--config ui.clonebundles=false"'))
1900 '"--config ui.clonebundles=false"'))
1899
1901
1900 def parseclonebundlesmanifest(repo, s):
1902 def parseclonebundlesmanifest(repo, s):
1901 """Parses the raw text of a clone bundles manifest.
1903 """Parses the raw text of a clone bundles manifest.
1902
1904
1903 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1905 Returns a list of dicts. The dicts have a ``URL`` key corresponding
1904 to the URL and other keys are the attributes for the entry.
1906 to the URL and other keys are the attributes for the entry.
1905 """
1907 """
1906 m = []
1908 m = []
1907 for line in s.splitlines():
1909 for line in s.splitlines():
1908 fields = line.split()
1910 fields = line.split()
1909 if not fields:
1911 if not fields:
1910 continue
1912 continue
1911 attrs = {'URL': fields[0]}
1913 attrs = {'URL': fields[0]}
1912 for rawattr in fields[1:]:
1914 for rawattr in fields[1:]:
1913 key, value = rawattr.split('=', 1)
1915 key, value = rawattr.split('=', 1)
1914 key = urlreq.unquote(key)
1916 key = urlreq.unquote(key)
1915 value = urlreq.unquote(value)
1917 value = urlreq.unquote(value)
1916 attrs[key] = value
1918 attrs[key] = value
1917
1919
1918 # Parse BUNDLESPEC into components. This makes client-side
1920 # Parse BUNDLESPEC into components. This makes client-side
1919 # preferences easier to specify since you can prefer a single
1921 # preferences easier to specify since you can prefer a single
1920 # component of the BUNDLESPEC.
1922 # component of the BUNDLESPEC.
1921 if key == 'BUNDLESPEC':
1923 if key == 'BUNDLESPEC':
1922 try:
1924 try:
1923 comp, version, params = parsebundlespec(repo, value,
1925 comp, version, params = parsebundlespec(repo, value,
1924 externalnames=True)
1926 externalnames=True)
1925 attrs['COMPRESSION'] = comp
1927 attrs['COMPRESSION'] = comp
1926 attrs['VERSION'] = version
1928 attrs['VERSION'] = version
1927 except error.InvalidBundleSpecification:
1929 except error.InvalidBundleSpecification:
1928 pass
1930 pass
1929 except error.UnsupportedBundleSpecification:
1931 except error.UnsupportedBundleSpecification:
1930 pass
1932 pass
1931
1933
1932 m.append(attrs)
1934 m.append(attrs)
1933
1935
1934 return m
1936 return m
1935
1937
1936 def filterclonebundleentries(repo, entries):
1938 def filterclonebundleentries(repo, entries, streamclonerequested=False):
1937 """Remove incompatible clone bundle manifest entries.
1939 """Remove incompatible clone bundle manifest entries.
1938
1940
1939 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1941 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
1940 and returns a new list consisting of only the entries that this client
1942 and returns a new list consisting of only the entries that this client
1941 should be able to apply.
1943 should be able to apply.
1942
1944
1943 There is no guarantee we'll be able to apply all returned entries because
1945 There is no guarantee we'll be able to apply all returned entries because
1944 the metadata we use to filter on may be missing or wrong.
1946 the metadata we use to filter on may be missing or wrong.
1945 """
1947 """
1946 newentries = []
1948 newentries = []
1947 for entry in entries:
1949 for entry in entries:
1948 spec = entry.get('BUNDLESPEC')
1950 spec = entry.get('BUNDLESPEC')
1949 if spec:
1951 if spec:
1950 try:
1952 try:
1951 parsebundlespec(repo, spec, strict=True)
1953 comp, version, params = parsebundlespec(repo, spec, strict=True)
1954
1955 # If a stream clone was requested, filter out non-streamclone
1956 # entries.
1957 if streamclonerequested and (comp != 'UN' or version != 's1'):
1958 repo.ui.debug('filtering %s because not a stream clone\n' %
1959 entry['URL'])
1960 continue
1961
1952 except error.InvalidBundleSpecification as e:
1962 except error.InvalidBundleSpecification as e:
1953 repo.ui.debug(str(e) + '\n')
1963 repo.ui.debug(str(e) + '\n')
1954 continue
1964 continue
1955 except error.UnsupportedBundleSpecification as e:
1965 except error.UnsupportedBundleSpecification as e:
1956 repo.ui.debug('filtering %s because unsupported bundle '
1966 repo.ui.debug('filtering %s because unsupported bundle '
1957 'spec: %s\n' % (entry['URL'], str(e)))
1967 'spec: %s\n' % (entry['URL'], str(e)))
1958 continue
1968 continue
1969 # If we don't have a spec and requested a stream clone, we don't know
1970 # what the entry is so don't attempt to apply it.
1971 elif streamclonerequested:
1972 repo.ui.debug('filtering %s because cannot determine if a stream '
1973 'clone bundle\n' % entry['URL'])
1974 continue
1959
1975
1960 if 'REQUIRESNI' in entry and not sslutil.hassni:
1976 if 'REQUIRESNI' in entry and not sslutil.hassni:
1961 repo.ui.debug('filtering %s because SNI not supported\n' %
1977 repo.ui.debug('filtering %s because SNI not supported\n' %
1962 entry['URL'])
1978 entry['URL'])
1963 continue
1979 continue
1964
1980
1965 newentries.append(entry)
1981 newentries.append(entry)
1966
1982
1967 return newentries
1983 return newentries
1968
1984
1969 class clonebundleentry(object):
1985 class clonebundleentry(object):
1970 """Represents an item in a clone bundles manifest.
1986 """Represents an item in a clone bundles manifest.
1971
1987
1972 This rich class is needed to support sorting since sorted() in Python 3
1988 This rich class is needed to support sorting since sorted() in Python 3
1973 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1989 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
1974 won't work.
1990 won't work.
1975 """
1991 """
1976
1992
1977 def __init__(self, value, prefers):
1993 def __init__(self, value, prefers):
1978 self.value = value
1994 self.value = value
1979 self.prefers = prefers
1995 self.prefers = prefers
1980
1996
1981 def _cmp(self, other):
1997 def _cmp(self, other):
1982 for prefkey, prefvalue in self.prefers:
1998 for prefkey, prefvalue in self.prefers:
1983 avalue = self.value.get(prefkey)
1999 avalue = self.value.get(prefkey)
1984 bvalue = other.value.get(prefkey)
2000 bvalue = other.value.get(prefkey)
1985
2001
1986 # Special case for b missing attribute and a matches exactly.
2002 # Special case for b missing attribute and a matches exactly.
1987 if avalue is not None and bvalue is None and avalue == prefvalue:
2003 if avalue is not None and bvalue is None and avalue == prefvalue:
1988 return -1
2004 return -1
1989
2005
1990 # Special case for a missing attribute and b matches exactly.
2006 # Special case for a missing attribute and b matches exactly.
1991 if bvalue is not None and avalue is None and bvalue == prefvalue:
2007 if bvalue is not None and avalue is None and bvalue == prefvalue:
1992 return 1
2008 return 1
1993
2009
1994 # We can't compare unless attribute present on both.
2010 # We can't compare unless attribute present on both.
1995 if avalue is None or bvalue is None:
2011 if avalue is None or bvalue is None:
1996 continue
2012 continue
1997
2013
1998 # Same values should fall back to next attribute.
2014 # Same values should fall back to next attribute.
1999 if avalue == bvalue:
2015 if avalue == bvalue:
2000 continue
2016 continue
2001
2017
2002 # Exact matches come first.
2018 # Exact matches come first.
2003 if avalue == prefvalue:
2019 if avalue == prefvalue:
2004 return -1
2020 return -1
2005 if bvalue == prefvalue:
2021 if bvalue == prefvalue:
2006 return 1
2022 return 1
2007
2023
2008 # Fall back to next attribute.
2024 # Fall back to next attribute.
2009 continue
2025 continue
2010
2026
2011 # If we got here we couldn't sort by attributes and prefers. Fall
2027 # If we got here we couldn't sort by attributes and prefers. Fall
2012 # back to index order.
2028 # back to index order.
2013 return 0
2029 return 0
2014
2030
2015 def __lt__(self, other):
2031 def __lt__(self, other):
2016 return self._cmp(other) < 0
2032 return self._cmp(other) < 0
2017
2033
2018 def __gt__(self, other):
2034 def __gt__(self, other):
2019 return self._cmp(other) > 0
2035 return self._cmp(other) > 0
2020
2036
2021 def __eq__(self, other):
2037 def __eq__(self, other):
2022 return self._cmp(other) == 0
2038 return self._cmp(other) == 0
2023
2039
2024 def __le__(self, other):
2040 def __le__(self, other):
2025 return self._cmp(other) <= 0
2041 return self._cmp(other) <= 0
2026
2042
2027 def __ge__(self, other):
2043 def __ge__(self, other):
2028 return self._cmp(other) >= 0
2044 return self._cmp(other) >= 0
2029
2045
2030 def __ne__(self, other):
2046 def __ne__(self, other):
2031 return self._cmp(other) != 0
2047 return self._cmp(other) != 0
2032
2048
2033 def sortclonebundleentries(ui, entries):
2049 def sortclonebundleentries(ui, entries):
2034 prefers = ui.configlist('ui', 'clonebundleprefers')
2050 prefers = ui.configlist('ui', 'clonebundleprefers')
2035 if not prefers:
2051 if not prefers:
2036 return list(entries)
2052 return list(entries)
2037
2053
2038 prefers = [p.split('=', 1) for p in prefers]
2054 prefers = [p.split('=', 1) for p in prefers]
2039
2055
2040 items = sorted(clonebundleentry(v, prefers) for v in entries)
2056 items = sorted(clonebundleentry(v, prefers) for v in entries)
2041 return [i.value for i in items]
2057 return [i.value for i in items]
2042
2058
2043 def trypullbundlefromurl(ui, repo, url):
2059 def trypullbundlefromurl(ui, repo, url):
2044 """Attempt to apply a bundle from a URL."""
2060 """Attempt to apply a bundle from a URL."""
2045 with repo.lock(), repo.transaction('bundleurl') as tr:
2061 with repo.lock(), repo.transaction('bundleurl') as tr:
2046 try:
2062 try:
2047 fh = urlmod.open(ui, url)
2063 fh = urlmod.open(ui, url)
2048 cg = readbundle(ui, fh, 'stream')
2064 cg = readbundle(ui, fh, 'stream')
2049
2065
2050 if isinstance(cg, streamclone.streamcloneapplier):
2066 if isinstance(cg, streamclone.streamcloneapplier):
2051 cg.apply(repo)
2067 cg.apply(repo)
2052 else:
2068 else:
2053 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2069 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2054 return True
2070 return True
2055 except urlerr.httperror as e:
2071 except urlerr.httperror as e:
2056 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2072 ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
2057 except urlerr.urlerror as e:
2073 except urlerr.urlerror as e:
2058 ui.warn(_('error fetching bundle: %s\n') % e.reason)
2074 ui.warn(_('error fetching bundle: %s\n') % e.reason)
2059
2075
2060 return False
2076 return False
@@ -1,503 +1,511 b''
1 Set up a server
1 Set up a server
2
2
3 $ cat >> $HGRCPATH << EOF
3 $ cat >> $HGRCPATH << EOF
4 > [format]
4 > [format]
5 > usegeneraldelta=yes
5 > usegeneraldelta=yes
6 > EOF
6 > EOF
7 $ hg init server
7 $ hg init server
8 $ cd server
8 $ cd server
9 $ cat >> .hg/hgrc << EOF
9 $ cat >> .hg/hgrc << EOF
10 > [extensions]
10 > [extensions]
11 > clonebundles =
11 > clonebundles =
12 > EOF
12 > EOF
13
13
14 $ touch foo
14 $ touch foo
15 $ hg -q commit -A -m 'add foo'
15 $ hg -q commit -A -m 'add foo'
16 $ touch bar
16 $ touch bar
17 $ hg -q commit -A -m 'add bar'
17 $ hg -q commit -A -m 'add bar'
18
18
19 $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
19 $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log
20 $ cat hg.pid >> $DAEMON_PIDS
20 $ cat hg.pid >> $DAEMON_PIDS
21 $ cd ..
21 $ cd ..
22
22
23 Missing manifest should not result in server lookup
23 Missing manifest should not result in server lookup
24
24
25 $ hg --verbose clone -U http://localhost:$HGPORT no-manifest
25 $ hg --verbose clone -U http://localhost:$HGPORT no-manifest
26 requesting all changes
26 requesting all changes
27 adding changesets
27 adding changesets
28 adding manifests
28 adding manifests
29 adding file changes
29 adding file changes
30 added 2 changesets with 2 changes to 2 files
30 added 2 changesets with 2 changes to 2 files
31
31
32 $ cat server/access.log
32 $ cat server/access.log
33 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
33 * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
34 * - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
34 * - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=*zlib,none,bzip2 (glob)
35 * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*,zlib,none,bzip2 (glob)
35 * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=*,zlib,none,bzip2 (glob)
36
36
37 Empty manifest file results in retrieval
37 Empty manifest file results in retrieval
38 (the extension only checks if the manifest file exists)
38 (the extension only checks if the manifest file exists)
39
39
40 $ touch server/.hg/clonebundles.manifest
40 $ touch server/.hg/clonebundles.manifest
41 $ hg --verbose clone -U http://localhost:$HGPORT empty-manifest
41 $ hg --verbose clone -U http://localhost:$HGPORT empty-manifest
42 no clone bundles available on remote; falling back to regular clone
42 no clone bundles available on remote; falling back to regular clone
43 requesting all changes
43 requesting all changes
44 adding changesets
44 adding changesets
45 adding manifests
45 adding manifests
46 adding file changes
46 adding file changes
47 added 2 changesets with 2 changes to 2 files
47 added 2 changesets with 2 changes to 2 files
48
48
49 Manifest file with invalid URL aborts
49 Manifest file with invalid URL aborts
50
50
51 $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
51 $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest
52 $ hg clone http://localhost:$HGPORT 404-url
52 $ hg clone http://localhost:$HGPORT 404-url
53 applying clone bundle from http://does.not.exist/bundle.hg
53 applying clone bundle from http://does.not.exist/bundle.hg
54 error fetching bundle: (.* not known|No address associated with hostname) (re) (no-windows !)
54 error fetching bundle: (.* not known|No address associated with hostname) (re) (no-windows !)
55 error fetching bundle: [Errno 11004] getaddrinfo failed (windows !)
55 error fetching bundle: [Errno 11004] getaddrinfo failed (windows !)
56 abort: error applying bundle
56 abort: error applying bundle
57 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
57 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
58 [255]
58 [255]
59
59
60 Server is not running aborts
60 Server is not running aborts
61
61
62 $ echo "http://localhost:$HGPORT1/bundle.hg" > server/.hg/clonebundles.manifest
62 $ echo "http://localhost:$HGPORT1/bundle.hg" > server/.hg/clonebundles.manifest
63 $ hg clone http://localhost:$HGPORT server-not-runner
63 $ hg clone http://localhost:$HGPORT server-not-runner
64 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
64 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
65 error fetching bundle: (.* refused.*|Protocol not supported|(.* )?Cannot assign requested address) (re)
65 error fetching bundle: (.* refused.*|Protocol not supported|(.* )?Cannot assign requested address) (re)
66 abort: error applying bundle
66 abort: error applying bundle
67 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
67 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
68 [255]
68 [255]
69
69
70 Server returns 404
70 Server returns 404
71
71
72 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
72 $ "$PYTHON" $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid
73 $ cat http.pid >> $DAEMON_PIDS
73 $ cat http.pid >> $DAEMON_PIDS
74 $ hg clone http://localhost:$HGPORT running-404
74 $ hg clone http://localhost:$HGPORT running-404
75 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
75 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
76 HTTP error fetching bundle: HTTP Error 404: File not found
76 HTTP error fetching bundle: HTTP Error 404: File not found
77 abort: error applying bundle
77 abort: error applying bundle
78 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
78 (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false")
79 [255]
79 [255]
80
80
81 We can override failure to fall back to regular clone
81 We can override failure to fall back to regular clone
82
82
83 $ hg --config ui.clonebundlefallback=true clone -U http://localhost:$HGPORT 404-fallback
83 $ hg --config ui.clonebundlefallback=true clone -U http://localhost:$HGPORT 404-fallback
84 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
84 applying clone bundle from http://localhost:$HGPORT1/bundle.hg
85 HTTP error fetching bundle: HTTP Error 404: File not found
85 HTTP error fetching bundle: HTTP Error 404: File not found
86 falling back to normal clone
86 falling back to normal clone
87 requesting all changes
87 requesting all changes
88 adding changesets
88 adding changesets
89 adding manifests
89 adding manifests
90 adding file changes
90 adding file changes
91 added 2 changesets with 2 changes to 2 files
91 added 2 changesets with 2 changes to 2 files
92
92
93 Bundle with partial content works
93 Bundle with partial content works
94
94
95 $ hg -R server bundle --type gzip-v1 --base null -r 53245c60e682 partial.hg
95 $ hg -R server bundle --type gzip-v1 --base null -r 53245c60e682 partial.hg
96 1 changesets found
96 1 changesets found
97
97
98 We verify exact bundle content as an extra check against accidental future
98 We verify exact bundle content as an extra check against accidental future
99 changes. If this output changes, we could break old clients.
99 changes. If this output changes, we could break old clients.
100
100
101 $ f --size --hexdump partial.hg
101 $ f --size --hexdump partial.hg
102 partial.hg: size=207
102 partial.hg: size=207
103 0000: 48 47 31 30 47 5a 78 9c 63 60 60 98 17 ac 12 93 |HG10GZx.c``.....|
103 0000: 48 47 31 30 47 5a 78 9c 63 60 60 98 17 ac 12 93 |HG10GZx.c``.....|
104 0010: f0 ac a9 23 45 70 cb bf 0d 5f 59 4e 4a 7f 79 21 |...#Ep..._YNJ.y!|
104 0010: f0 ac a9 23 45 70 cb bf 0d 5f 59 4e 4a 7f 79 21 |...#Ep..._YNJ.y!|
105 0020: 9b cc 40 24 20 a0 d7 ce 2c d1 38 25 cd 24 25 d5 |..@$ ...,.8%.$%.|
105 0020: 9b cc 40 24 20 a0 d7 ce 2c d1 38 25 cd 24 25 d5 |..@$ ...,.8%.$%.|
106 0030: d8 c2 22 cd 38 d9 24 cd 22 d5 c8 22 cd 24 cd 32 |..".8.$."..".$.2|
106 0030: d8 c2 22 cd 38 d9 24 cd 22 d5 c8 22 cd 24 cd 32 |..".8.$."..".$.2|
107 0040: d1 c2 d0 c4 c8 d2 32 d1 38 39 29 c9 34 cd d4 80 |......2.89).4...|
107 0040: d1 c2 d0 c4 c8 d2 32 d1 38 39 29 c9 34 cd d4 80 |......2.89).4...|
108 0050: ab 24 b5 b8 84 cb 40 c1 80 2b 2d 3f 9f 8b 2b 31 |.$....@..+-?..+1|
108 0050: ab 24 b5 b8 84 cb 40 c1 80 2b 2d 3f 9f 8b 2b 31 |.$....@..+-?..+1|
109 0060: 25 45 01 c8 80 9a d2 9b 65 fb e5 9e 45 bf 8d 7f |%E......e...E...|
109 0060: 25 45 01 c8 80 9a d2 9b 65 fb e5 9e 45 bf 8d 7f |%E......e...E...|
110 0070: 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 92 0b 75 |....+D4g.......u|
110 0070: 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 92 0b 75 |....+D4g.......u|
111 0080: 41 d6 24 59 18 a4 a4 9a a6 18 1a 5b 98 9b 5a 98 |A.$Y.......[..Z.|
111 0080: 41 d6 24 59 18 a4 a4 9a a6 18 1a 5b 98 9b 5a 98 |A.$Y.......[..Z.|
112 0090: 9a 18 26 9b a6 19 98 1a 99 99 26 a6 18 9a 98 24 |..&.......&....$|
112 0090: 9a 18 26 9b a6 19 98 1a 99 99 26 a6 18 9a 98 24 |..&.......&....$|
113 00a0: 26 59 a6 25 5a 98 a5 18 a6 24 71 41 35 b1 43 dc |&Y.%Z....$qA5.C.|
113 00a0: 26 59 a6 25 5a 98 a5 18 a6 24 71 41 35 b1 43 dc |&Y.%Z....$qA5.C.|
114 00b0: 16 b2 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a |.....E..V....R..|
114 00b0: 16 b2 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a |.....E..V....R..|
115 00c0: 78 ed fc d5 76 f1 36 35 dc 05 00 36 ed 5e c7 |x...v.65...6.^.|
115 00c0: 78 ed fc d5 76 f1 36 35 dc 05 00 36 ed 5e c7 |x...v.65...6.^.|
116
116
117 $ echo "http://localhost:$HGPORT1/partial.hg" > server/.hg/clonebundles.manifest
117 $ echo "http://localhost:$HGPORT1/partial.hg" > server/.hg/clonebundles.manifest
118 $ hg clone -U http://localhost:$HGPORT partial-bundle
118 $ hg clone -U http://localhost:$HGPORT partial-bundle
119 applying clone bundle from http://localhost:$HGPORT1/partial.hg
119 applying clone bundle from http://localhost:$HGPORT1/partial.hg
120 adding changesets
120 adding changesets
121 adding manifests
121 adding manifests
122 adding file changes
122 adding file changes
123 added 1 changesets with 1 changes to 1 files
123 added 1 changesets with 1 changes to 1 files
124 finished applying clone bundle
124 finished applying clone bundle
125 searching for changes
125 searching for changes
126 adding changesets
126 adding changesets
127 adding manifests
127 adding manifests
128 adding file changes
128 adding file changes
129 added 1 changesets with 1 changes to 1 files
129 added 1 changesets with 1 changes to 1 files
130
130
131 Incremental pull doesn't fetch bundle
131 Incremental pull doesn't fetch bundle
132
132
133 $ hg clone -r 53245c60e682 -U http://localhost:$HGPORT partial-clone
133 $ hg clone -r 53245c60e682 -U http://localhost:$HGPORT partial-clone
134 adding changesets
134 adding changesets
135 adding manifests
135 adding manifests
136 adding file changes
136 adding file changes
137 added 1 changesets with 1 changes to 1 files
137 added 1 changesets with 1 changes to 1 files
138
138
139 $ cd partial-clone
139 $ cd partial-clone
140 $ hg pull
140 $ hg pull
141 pulling from http://localhost:$HGPORT/
141 pulling from http://localhost:$HGPORT/
142 searching for changes
142 searching for changes
143 adding changesets
143 adding changesets
144 adding manifests
144 adding manifests
145 adding file changes
145 adding file changes
146 added 1 changesets with 1 changes to 1 files
146 added 1 changesets with 1 changes to 1 files
147 (run 'hg update' to get a working copy)
147 (run 'hg update' to get a working copy)
148 $ cd ..
148 $ cd ..
149
149
150 Bundle with full content works
150 Bundle with full content works
151
151
152 $ hg -R server bundle --type gzip-v2 --base null -r tip full.hg
152 $ hg -R server bundle --type gzip-v2 --base null -r tip full.hg
153 2 changesets found
153 2 changesets found
154
154
155 Again, we perform an extra check against bundle content changes. If this content
155 Again, we perform an extra check against bundle content changes. If this content
156 changes, clone bundles produced by new Mercurial versions may not be readable
156 changes, clone bundles produced by new Mercurial versions may not be readable
157 by old clients.
157 by old clients.
158
158
159 $ f --size --hexdump full.hg
159 $ f --size --hexdump full.hg
160 full.hg: size=396
160 full.hg: size=396
161 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
161 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
162 0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 d0 e4 76 f6 70 |ion=GZx.c``..v.p|
162 0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 d0 e4 76 f6 70 |ion=GZx.c``..v.p|
163 0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 46 76 26 4e |.swu....`..FFv&N|
163 0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 46 76 26 4e |.swu....`..FFv&N|
164 0030: c6 b2 d4 a2 e2 cc fc 3c 03 a3 bc a4 e4 8c c4 bc |.......<........|
164 0030: c6 b2 d4 a2 e2 cc fc 3c 03 a3 bc a4 e4 8c c4 bc |.......<........|
165 0040: f4 d4 62 23 06 06 e6 19 40 f9 4d c1 2a 31 09 cf |..b#....@.M.*1..|
165 0040: f4 d4 62 23 06 06 e6 19 40 f9 4d c1 2a 31 09 cf |..b#....@.M.*1..|
166 0050: 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 97 17 b2 c9 |.:R.............|
166 0050: 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 97 17 b2 c9 |.:R.............|
167 0060: 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 a4 a4 1a 5b |.......%.......[|
167 0060: 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 a4 a4 1a 5b |.......%.......[|
168 0070: 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 a4 59 26 5a |X..'..Y..Y...Y&Z|
168 0070: 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 a4 59 26 5a |X..'..Y..Y...Y&Z|
169 0080: 18 9a 18 59 5a 26 1a 27 27 25 99 a6 99 1a 70 95 |...YZ&.''%....p.|
169 0080: 18 9a 18 59 5a 26 1a 27 27 25 99 a6 99 1a 70 95 |...YZ&.''%....p.|
170 0090: a4 16 97 70 19 28 18 70 a5 e5 e7 73 71 25 a6 a4 |...p.(.p...sq%..|
170 0090: a4 16 97 70 19 28 18 70 a5 e5 e7 73 71 25 a6 a4 |...p.(.p...sq%..|
171 00a0: 28 00 19 20 17 af fa df ab ff 7b 3f fb 92 dc 8b |(.. ......{?....|
171 00a0: 28 00 19 20 17 af fa df ab ff 7b 3f fb 92 dc 8b |(.. ......{?....|
172 00b0: 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 89 2f b0 99 87 |.b......=ZD./...|
172 00b0: 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 89 2f b0 99 87 |.b......=ZD./...|
173 00c0: ec e2 54 63 43 e3 b4 64 43 73 23 33 43 53 0b 63 |..TcC..dCs#3CS.c|
173 00c0: ec e2 54 63 43 e3 b4 64 43 73 23 33 43 53 0b 63 |..TcC..dCs#3CS.c|
174 00d0: d3 14 23 03 a0 fb 2c 2c 0c d3 80 1e 30 49 49 b1 |..#...,,....0II.|
174 00d0: d3 14 23 03 a0 fb 2c 2c 0c d3 80 1e 30 49 49 b1 |..#...,,....0II.|
175 00e0: 4c 4a 32 48 33 30 b0 34 42 b8 38 29 b1 08 e2 62 |LJ2H30.4B.8)...b|
175 00e0: 4c 4a 32 48 33 30 b0 34 42 b8 38 29 b1 08 e2 62 |LJ2H30.4B.8)...b|
176 00f0: 20 03 6a ca c2 2c db 2f f7 2c fa 6d fc fb 34 be | .j..,./.,.m..4.|
176 00f0: 20 03 6a ca c2 2c db 2f f7 2c fa 6d fc fb 34 be | .j..,./.,.m..4.|
177 0100: fc 5c 21 a2 39 cb 66 77 7c 00 0d c3 59 17 14 58 |.\!.9.fw|...Y..X|
177 0100: fc 5c 21 a2 39 cb 66 77 7c 00 0d c3 59 17 14 58 |.\!.9.fw|...Y..X|
178 0110: 49 16 06 29 a9 a6 29 86 c6 16 e6 a6 16 a6 26 86 |I..)..).......&.|
178 0110: 49 16 06 29 a9 a6 29 86 c6 16 e6 a6 16 a6 26 86 |I..)..).......&.|
179 0120: c9 a6 69 06 a6 46 66 a6 89 29 86 26 26 89 49 96 |..i..Ff..).&&.I.|
179 0120: c9 a6 69 06 a6 46 66 a6 89 29 86 26 26 89 49 96 |..i..Ff..).&&.I.|
180 0130: 69 89 16 66 29 86 29 49 5c 20 07 3e 16 fe 23 ae |i..f).)I\ .>..#.|
180 0130: 69 89 16 66 29 86 29 49 5c 20 07 3e 16 fe 23 ae |i..f).)I\ .>..#.|
181 0140: 26 da 1c ab 10 1f d1 f8 e3 b3 ef cd dd fc 0c 93 |&...............|
181 0140: 26 da 1c ab 10 1f d1 f8 e3 b3 ef cd dd fc 0c 93 |&...............|
182 0150: 88 75 34 36 75 04 82 55 17 14 36 a4 38 10 04 d8 |.u46u..U..6.8...|
182 0150: 88 75 34 36 75 04 82 55 17 14 36 a4 38 10 04 d8 |.u46u..U..6.8...|
183 0160: 21 01 9a b1 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 |!......E..V....R|
183 0160: 21 01 9a b1 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 |!......E..V....R|
184 0170: d7 8a 78 ed fc d5 76 f1 36 25 81 89 c7 ad ec 90 |..x...v.6%......|
184 0170: d7 8a 78 ed fc d5 76 f1 36 25 81 89 c7 ad ec 90 |..x...v.6%......|
185 0180: 54 47 75 2b 89 49 b1 00 d2 8a eb 92 |TGu+.I......|
185 0180: 54 47 75 2b 89 49 b1 00 d2 8a eb 92 |TGu+.I......|
186
186
187 $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest
187 $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest
188 $ hg clone -U http://localhost:$HGPORT full-bundle
188 $ hg clone -U http://localhost:$HGPORT full-bundle
189 applying clone bundle from http://localhost:$HGPORT1/full.hg
189 applying clone bundle from http://localhost:$HGPORT1/full.hg
190 adding changesets
190 adding changesets
191 adding manifests
191 adding manifests
192 adding file changes
192 adding file changes
193 added 2 changesets with 2 changes to 2 files
193 added 2 changesets with 2 changes to 2 files
194 finished applying clone bundle
194 finished applying clone bundle
195 searching for changes
195 searching for changes
196 no changes found
196 no changes found
197
197
198 Feature works over SSH
198 Feature works over SSH
199
199
200 $ hg clone -U -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/server ssh-full-clone
200 $ hg clone -U -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/server ssh-full-clone
201 applying clone bundle from http://localhost:$HGPORT1/full.hg
201 applying clone bundle from http://localhost:$HGPORT1/full.hg
202 adding changesets
202 adding changesets
203 adding manifests
203 adding manifests
204 adding file changes
204 adding file changes
205 added 2 changesets with 2 changes to 2 files
205 added 2 changesets with 2 changes to 2 files
206 finished applying clone bundle
206 finished applying clone bundle
207 searching for changes
207 searching for changes
208 no changes found
208 no changes found
209
209
210 Entry with unknown BUNDLESPEC is filtered and not used
210 Entry with unknown BUNDLESPEC is filtered and not used
211
211
212 $ cat > server/.hg/clonebundles.manifest << EOF
212 $ cat > server/.hg/clonebundles.manifest << EOF
213 > http://bad.entry1 BUNDLESPEC=UNKNOWN
213 > http://bad.entry1 BUNDLESPEC=UNKNOWN
214 > http://bad.entry2 BUNDLESPEC=xz-v1
214 > http://bad.entry2 BUNDLESPEC=xz-v1
215 > http://bad.entry3 BUNDLESPEC=none-v100
215 > http://bad.entry3 BUNDLESPEC=none-v100
216 > http://localhost:$HGPORT1/full.hg BUNDLESPEC=gzip-v2
216 > http://localhost:$HGPORT1/full.hg BUNDLESPEC=gzip-v2
217 > EOF
217 > EOF
218
218
219 $ hg clone -U http://localhost:$HGPORT filter-unknown-type
219 $ hg clone -U http://localhost:$HGPORT filter-unknown-type
220 applying clone bundle from http://localhost:$HGPORT1/full.hg
220 applying clone bundle from http://localhost:$HGPORT1/full.hg
221 adding changesets
221 adding changesets
222 adding manifests
222 adding manifests
223 adding file changes
223 adding file changes
224 added 2 changesets with 2 changes to 2 files
224 added 2 changesets with 2 changes to 2 files
225 finished applying clone bundle
225 finished applying clone bundle
226 searching for changes
226 searching for changes
227 no changes found
227 no changes found
228
228
229 Automatic fallback when all entries are filtered
229 Automatic fallback when all entries are filtered
230
230
231 $ cat > server/.hg/clonebundles.manifest << EOF
231 $ cat > server/.hg/clonebundles.manifest << EOF
232 > http://bad.entry BUNDLESPEC=UNKNOWN
232 > http://bad.entry BUNDLESPEC=UNKNOWN
233 > EOF
233 > EOF
234
234
235 $ hg clone -U http://localhost:$HGPORT filter-all
235 $ hg clone -U http://localhost:$HGPORT filter-all
236 no compatible clone bundles available on server; falling back to regular clone
236 no compatible clone bundles available on server; falling back to regular clone
237 (you may want to report this to the server operator)
237 (you may want to report this to the server operator)
238 requesting all changes
238 requesting all changes
239 adding changesets
239 adding changesets
240 adding manifests
240 adding manifests
241 adding file changes
241 adding file changes
242 added 2 changesets with 2 changes to 2 files
242 added 2 changesets with 2 changes to 2 files
243
243
244 URLs requiring SNI are filtered in Python <2.7.9
244 URLs requiring SNI are filtered in Python <2.7.9
245
245
246 $ cp full.hg sni.hg
246 $ cp full.hg sni.hg
247 $ cat > server/.hg/clonebundles.manifest << EOF
247 $ cat > server/.hg/clonebundles.manifest << EOF
248 > http://localhost:$HGPORT1/sni.hg REQUIRESNI=true
248 > http://localhost:$HGPORT1/sni.hg REQUIRESNI=true
249 > http://localhost:$HGPORT1/full.hg
249 > http://localhost:$HGPORT1/full.hg
250 > EOF
250 > EOF
251
251
252 #if sslcontext
252 #if sslcontext
253 Python 2.7.9+ support SNI
253 Python 2.7.9+ support SNI
254
254
255 $ hg clone -U http://localhost:$HGPORT sni-supported
255 $ hg clone -U http://localhost:$HGPORT sni-supported
256 applying clone bundle from http://localhost:$HGPORT1/sni.hg
256 applying clone bundle from http://localhost:$HGPORT1/sni.hg
257 adding changesets
257 adding changesets
258 adding manifests
258 adding manifests
259 adding file changes
259 adding file changes
260 added 2 changesets with 2 changes to 2 files
260 added 2 changesets with 2 changes to 2 files
261 finished applying clone bundle
261 finished applying clone bundle
262 searching for changes
262 searching for changes
263 no changes found
263 no changes found
264 #else
264 #else
265 Python <2.7.9 will filter SNI URLs
265 Python <2.7.9 will filter SNI URLs
266
266
267 $ hg clone -U http://localhost:$HGPORT sni-unsupported
267 $ hg clone -U http://localhost:$HGPORT sni-unsupported
268 applying clone bundle from http://localhost:$HGPORT1/full.hg
268 applying clone bundle from http://localhost:$HGPORT1/full.hg
269 adding changesets
269 adding changesets
270 adding manifests
270 adding manifests
271 adding file changes
271 adding file changes
272 added 2 changesets with 2 changes to 2 files
272 added 2 changesets with 2 changes to 2 files
273 finished applying clone bundle
273 finished applying clone bundle
274 searching for changes
274 searching for changes
275 no changes found
275 no changes found
276 #endif
276 #endif
277
277
278 Stream clone bundles are supported
278 Stream clone bundles are supported
279
279
280 $ hg -R server debugcreatestreamclonebundle packed.hg
280 $ hg -R server debugcreatestreamclonebundle packed.hg
281 writing 613 bytes for 4 files
281 writing 613 bytes for 4 files
282 bundle requirements: generaldelta, revlogv1
282 bundle requirements: generaldelta, revlogv1
283
283
284 No bundle spec should work
284 No bundle spec should work
285
285
286 $ cat > server/.hg/clonebundles.manifest << EOF
286 $ cat > server/.hg/clonebundles.manifest << EOF
287 > http://localhost:$HGPORT1/packed.hg
287 > http://localhost:$HGPORT1/packed.hg
288 > EOF
288 > EOF
289
289
290 $ hg clone -U http://localhost:$HGPORT stream-clone-no-spec
290 $ hg clone -U http://localhost:$HGPORT stream-clone-no-spec
291 applying clone bundle from http://localhost:$HGPORT1/packed.hg
291 applying clone bundle from http://localhost:$HGPORT1/packed.hg
292 4 files to transfer, 613 bytes of data
292 4 files to transfer, 613 bytes of data
293 transferred 613 bytes in *.* seconds (*) (glob)
293 transferred 613 bytes in *.* seconds (*) (glob)
294 finished applying clone bundle
294 finished applying clone bundle
295 searching for changes
295 searching for changes
296 no changes found
296 no changes found
297
297
298 Bundle spec without parameters should work
298 Bundle spec without parameters should work
299
299
300 $ cat > server/.hg/clonebundles.manifest << EOF
300 $ cat > server/.hg/clonebundles.manifest << EOF
301 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
301 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
302 > EOF
302 > EOF
303
303
304 $ hg clone -U http://localhost:$HGPORT stream-clone-vanilla-spec
304 $ hg clone -U http://localhost:$HGPORT stream-clone-vanilla-spec
305 applying clone bundle from http://localhost:$HGPORT1/packed.hg
305 applying clone bundle from http://localhost:$HGPORT1/packed.hg
306 4 files to transfer, 613 bytes of data
306 4 files to transfer, 613 bytes of data
307 transferred 613 bytes in *.* seconds (*) (glob)
307 transferred 613 bytes in *.* seconds (*) (glob)
308 finished applying clone bundle
308 finished applying clone bundle
309 searching for changes
309 searching for changes
310 no changes found
310 no changes found
311
311
312 Bundle spec with format requirements should work
312 Bundle spec with format requirements should work
313
313
314 $ cat > server/.hg/clonebundles.manifest << EOF
314 $ cat > server/.hg/clonebundles.manifest << EOF
315 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
315 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
316 > EOF
316 > EOF
317
317
318 $ hg clone -U http://localhost:$HGPORT stream-clone-supported-requirements
318 $ hg clone -U http://localhost:$HGPORT stream-clone-supported-requirements
319 applying clone bundle from http://localhost:$HGPORT1/packed.hg
319 applying clone bundle from http://localhost:$HGPORT1/packed.hg
320 4 files to transfer, 613 bytes of data
320 4 files to transfer, 613 bytes of data
321 transferred 613 bytes in *.* seconds (*) (glob)
321 transferred 613 bytes in *.* seconds (*) (glob)
322 finished applying clone bundle
322 finished applying clone bundle
323 searching for changes
323 searching for changes
324 no changes found
324 no changes found
325
325
326 Stream bundle spec with unknown requirements should be filtered out
326 Stream bundle spec with unknown requirements should be filtered out
327
327
328 $ cat > server/.hg/clonebundles.manifest << EOF
328 $ cat > server/.hg/clonebundles.manifest << EOF
329 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
329 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
330 > EOF
330 > EOF
331
331
332 $ hg clone -U http://localhost:$HGPORT stream-clone-unsupported-requirements
332 $ hg clone -U http://localhost:$HGPORT stream-clone-unsupported-requirements
333 no compatible clone bundles available on server; falling back to regular clone
333 no compatible clone bundles available on server; falling back to regular clone
334 (you may want to report this to the server operator)
334 (you may want to report this to the server operator)
335 requesting all changes
335 requesting all changes
336 adding changesets
336 adding changesets
337 adding manifests
337 adding manifests
338 adding file changes
338 adding file changes
339 added 2 changesets with 2 changes to 2 files
339 added 2 changesets with 2 changes to 2 files
340
340
341 Set up manifest for testing preferences
341 Set up manifest for testing preferences
342 (Remember, the TYPE does not have to match reality - the URL is
342 (Remember, the TYPE does not have to match reality - the URL is
343 important)
343 important)
344
344
345 $ cp full.hg gz-a.hg
345 $ cp full.hg gz-a.hg
346 $ cp full.hg gz-b.hg
346 $ cp full.hg gz-b.hg
347 $ cp full.hg bz2-a.hg
347 $ cp full.hg bz2-a.hg
348 $ cp full.hg bz2-b.hg
348 $ cp full.hg bz2-b.hg
349 $ cat > server/.hg/clonebundles.manifest << EOF
349 $ cat > server/.hg/clonebundles.manifest << EOF
350 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 extra=a
350 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 extra=a
351 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 extra=a
351 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 extra=a
352 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
352 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
353 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
353 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
354 > EOF
354 > EOF
355
355
356 Preferring an undefined attribute will take first entry
356 Preferring an undefined attribute will take first entry
357
357
358 $ hg --config ui.clonebundleprefers=foo=bar clone -U http://localhost:$HGPORT prefer-foo
358 $ hg --config ui.clonebundleprefers=foo=bar clone -U http://localhost:$HGPORT prefer-foo
359 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
359 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
360 adding changesets
360 adding changesets
361 adding manifests
361 adding manifests
362 adding file changes
362 adding file changes
363 added 2 changesets with 2 changes to 2 files
363 added 2 changesets with 2 changes to 2 files
364 finished applying clone bundle
364 finished applying clone bundle
365 searching for changes
365 searching for changes
366 no changes found
366 no changes found
367
367
368 Preferring bz2 type will download first entry of that type
368 Preferring bz2 type will download first entry of that type
369
369
370 $ hg --config ui.clonebundleprefers=COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-bz
370 $ hg --config ui.clonebundleprefers=COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-bz
371 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
371 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
372 adding changesets
372 adding changesets
373 adding manifests
373 adding manifests
374 adding file changes
374 adding file changes
375 added 2 changesets with 2 changes to 2 files
375 added 2 changesets with 2 changes to 2 files
376 finished applying clone bundle
376 finished applying clone bundle
377 searching for changes
377 searching for changes
378 no changes found
378 no changes found
379
379
380 Preferring multiple values of an option works
380 Preferring multiple values of an option works
381
381
382 $ hg --config ui.clonebundleprefers=COMPRESSION=unknown,COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-multiple-bz
382 $ hg --config ui.clonebundleprefers=COMPRESSION=unknown,COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-multiple-bz
383 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
383 applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg
384 adding changesets
384 adding changesets
385 adding manifests
385 adding manifests
386 adding file changes
386 adding file changes
387 added 2 changesets with 2 changes to 2 files
387 added 2 changesets with 2 changes to 2 files
388 finished applying clone bundle
388 finished applying clone bundle
389 searching for changes
389 searching for changes
390 no changes found
390 no changes found
391
391
392 Sorting multiple values should get us back to original first entry
392 Sorting multiple values should get us back to original first entry
393
393
394 $ hg --config ui.clonebundleprefers=BUNDLESPEC=unknown,BUNDLESPEC=gzip-v2,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-multiple-gz
394 $ hg --config ui.clonebundleprefers=BUNDLESPEC=unknown,BUNDLESPEC=gzip-v2,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-multiple-gz
395 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
395 applying clone bundle from http://localhost:$HGPORT1/gz-a.hg
396 adding changesets
396 adding changesets
397 adding manifests
397 adding manifests
398 adding file changes
398 adding file changes
399 added 2 changesets with 2 changes to 2 files
399 added 2 changesets with 2 changes to 2 files
400 finished applying clone bundle
400 finished applying clone bundle
401 searching for changes
401 searching for changes
402 no changes found
402 no changes found
403
403
404 Preferring multiple attributes has correct order
404 Preferring multiple attributes has correct order
405
405
406 $ hg --config ui.clonebundleprefers=extra=b,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-separate-attributes
406 $ hg --config ui.clonebundleprefers=extra=b,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-separate-attributes
407 applying clone bundle from http://localhost:$HGPORT1/bz2-b.hg
407 applying clone bundle from http://localhost:$HGPORT1/bz2-b.hg
408 adding changesets
408 adding changesets
409 adding manifests
409 adding manifests
410 adding file changes
410 adding file changes
411 added 2 changesets with 2 changes to 2 files
411 added 2 changesets with 2 changes to 2 files
412 finished applying clone bundle
412 finished applying clone bundle
413 searching for changes
413 searching for changes
414 no changes found
414 no changes found
415
415
416 Test where attribute is missing from some entries
416 Test where attribute is missing from some entries
417
417
418 $ cat > server/.hg/clonebundles.manifest << EOF
418 $ cat > server/.hg/clonebundles.manifest << EOF
419 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
419 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
420 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2
420 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2
421 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
421 > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b
422 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
422 > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b
423 > EOF
423 > EOF
424
424
425 $ hg --config ui.clonebundleprefers=extra=b clone -U http://localhost:$HGPORT prefer-partially-defined-attribute
425 $ hg --config ui.clonebundleprefers=extra=b clone -U http://localhost:$HGPORT prefer-partially-defined-attribute
426 applying clone bundle from http://localhost:$HGPORT1/gz-b.hg
426 applying clone bundle from http://localhost:$HGPORT1/gz-b.hg
427 adding changesets
427 adding changesets
428 adding manifests
428 adding manifests
429 adding file changes
429 adding file changes
430 added 2 changesets with 2 changes to 2 files
430 added 2 changesets with 2 changes to 2 files
431 finished applying clone bundle
431 finished applying clone bundle
432 searching for changes
432 searching for changes
433 no changes found
433 no changes found
434
434
435 Test interaction between clone bundles and --uncompressed
435 Test interaction between clone bundles and --uncompressed
436
436
437 A manifest with just a gzip bundle
437 A manifest with just a gzip bundle
438
438
439 $ cat > server/.hg/clonebundles.manifest << EOF
439 $ cat > server/.hg/clonebundles.manifest << EOF
440 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
440 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
441 > EOF
441 > EOF
442
442
443 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip
443 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip
444 no compatible clone bundles available on server; falling back to regular clone
445 (you may want to report this to the server operator)
444 streaming all changes
446 streaming all changes
445 4 files to transfer, 613 bytes of data
447 4 files to transfer, 613 bytes of data
446 transferred 613 bytes in * seconds (*) (glob)
448 transferred 613 bytes in * seconds (*) (glob)
447 searching for changes
449 searching for changes
448 no changes found
450 no changes found
449
451
450 A manifest with a stream clone but no BUNDLESPEC
452 A manifest with a stream clone but no BUNDLESPEC
451
453
452 $ cat > server/.hg/clonebundles.manifest << EOF
454 $ cat > server/.hg/clonebundles.manifest << EOF
453 > http://localhost:$HGPORT1/packed.hg
455 > http://localhost:$HGPORT1/packed.hg
454 > EOF
456 > EOF
455
457
456 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-no-bundlespec
458 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-no-bundlespec
459 no compatible clone bundles available on server; falling back to regular clone
460 (you may want to report this to the server operator)
457 streaming all changes
461 streaming all changes
458 4 files to transfer, 613 bytes of data
462 4 files to transfer, 613 bytes of data
459 transferred 613 bytes in * seconds (*) (glob)
463 transferred 613 bytes in * seconds (*) (glob)
460 searching for changes
464 searching for changes
461 no changes found
465 no changes found
462
466
463 A manifest with a gzip bundle and a stream clone
467 A manifest with a gzip bundle and a stream clone
464
468
465 $ cat > server/.hg/clonebundles.manifest << EOF
469 $ cat > server/.hg/clonebundles.manifest << EOF
466 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
470 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
467 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
471 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1
468 > EOF
472 > EOF
469
473
470 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip-packed
474 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip-packed
471 streaming all changes
475 applying clone bundle from http://localhost:$HGPORT1/packed.hg
472 4 files to transfer, 613 bytes of data
476 4 files to transfer, 613 bytes of data
473 transferred 613 bytes in * seconds (*) (glob)
477 transferred 613 bytes in * seconds (*) (glob)
478 finished applying clone bundle
474 searching for changes
479 searching for changes
475 no changes found
480 no changes found
476
481
477 A manifest with a gzip bundle and stream clone with supported requirements
482 A manifest with a gzip bundle and stream clone with supported requirements
478
483
479 $ cat > server/.hg/clonebundles.manifest << EOF
484 $ cat > server/.hg/clonebundles.manifest << EOF
480 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
485 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
481 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
486 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1
482 > EOF
487 > EOF
483
488
484 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip-packed-requirements
489 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip-packed-requirements
485 streaming all changes
490 applying clone bundle from http://localhost:$HGPORT1/packed.hg
486 4 files to transfer, 613 bytes of data
491 4 files to transfer, 613 bytes of data
487 transferred 613 bytes in * seconds (*) (glob)
492 transferred 613 bytes in * seconds (*) (glob)
493 finished applying clone bundle
488 searching for changes
494 searching for changes
489 no changes found
495 no changes found
490
496
491 A manifest with a gzip bundle and a stream clone with unsupported requirements
497 A manifest with a gzip bundle and a stream clone with unsupported requirements
492
498
493 $ cat > server/.hg/clonebundles.manifest << EOF
499 $ cat > server/.hg/clonebundles.manifest << EOF
494 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
500 > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2
495 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
501 > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42
496 > EOF
502 > EOF
497
503
498 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip-packed-unsupported-requirements
504 $ hg clone -U --uncompressed http://localhost:$HGPORT uncompressed-gzip-packed-unsupported-requirements
505 no compatible clone bundles available on server; falling back to regular clone
506 (you may want to report this to the server operator)
499 streaming all changes
507 streaming all changes
500 4 files to transfer, 613 bytes of data
508 4 files to transfer, 613 bytes of data
501 transferred 613 bytes in * seconds (*) (glob)
509 transferred 613 bytes in * seconds (*) (glob)
502 searching for changes
510 searching for changes
503 no changes found
511 no changes found
General Comments 0
You need to be logged in to leave comments. Login now