exchange: use command executor interface for calling listkeys...
Gregory Szorc - r37775:2a8ad00b default
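
The diff below switches the push discovery steps from calling `pushop.remote.listkeys(namespace)` directly to a module-level `listkeys()` helper that goes through the peer's command executor. The helper's definition lands elsewhere in the full diff (outside the excerpt shown here); based on the commit message, it presumably looks like the following sketch, where `commandexecutor()` and `callcommand()` are the executor interface this series introduces:

    def listkeys(remote, namespace):
        # Issue 'listkeys' through the peer's command executor instead of
        # calling remote.listkeys() directly; callcommand() returns a
        # future whose result() blocks until the response is available.
        with remote.commandexecutor() as e:
            return e.callcommand('listkeys', {'namespace': namespace}).result()
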
@@ -1,2410 +1,2422 @@
 # exchange.py - utility to exchange data between repos.
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from __future__ import absolute_import

 import collections
 import errno
 import hashlib

 from .i18n import _
 from .node import (
     bin,
     hex,
     nullid,
 )
 from .thirdparty import (
     attr,
 )
 from . import (
     bookmarks as bookmod,
     bundle2,
     changegroup,
     discovery,
     error,
     lock as lockmod,
     logexchange,
     obsolete,
     phases,
     pushkey,
     pycompat,
     scmutil,
     sslutil,
     streamclone,
     url as urlmod,
     util,
 )
 from .utils import (
     stringutil,
 )

 urlerr = util.urlerr
 urlreq = util.urlreq

 # Maps bundle version human names to changegroup versions.
 _bundlespeccgversions = {'v1': '01',
                          'v2': '02',
                          'packed1': 's1',
                          'bundle2': '02', #legacy
                         }

 # Maps bundle version with content opts to choose which part to bundle
 _bundlespeccontentopts = {
     'v1': {
         'changegroup': True,
         'cg.version': '01',
         'obsolescence': False,
         'phases': False,
         'tagsfnodescache': False,
         'revbranchcache': False
     },
     'v2': {
         'changegroup': True,
         'cg.version': '02',
         'obsolescence': False,
         'phases': False,
         'tagsfnodescache': True,
         'revbranchcache': True
     },
     'packed1' : {
         'cg.version': 's1'
     }
 }
 _bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']

 _bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
                                     "tagsfnodescache": False,
                                     "revbranchcache": False}}

 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}

 @attr.s
 class bundlespec(object):
     compression = attr.ib()
     version = attr.ib()
     params = attr.ib()
     contentopts = attr.ib()

 def parsebundlespec(repo, spec, strict=True, externalnames=False):
     """Parse a bundle string specification into parts.

     Bundle specifications denote a well-defined bundle/exchange format.
     The content of a given specification should not change over time in
     order to ensure that bundles produced by a newer version of Mercurial are
     readable from an older version.

     The string currently has the form:

        <compression>-<type>[;<parameter0>[;<parameter1>]]

     Where <compression> is one of the supported compression formats
     and <type> is (currently) a version string. A ";" can follow the type and
     all text afterwards is interpreted as URI encoded, ";" delimited key=value
     pairs.

     If ``strict`` is True (the default) <compression> is required. Otherwise,
     it is optional.

     If ``externalnames`` is False (the default), the human-centric names will
     be converted to their internal representation.

     Returns a bundlespec object of (compression, version, parameters).
     Compression will be ``None`` if not in strict mode and a compression isn't
     defined.

     An ``InvalidBundleSpecification`` is raised when the specification is
     not syntactically well formed.

     An ``UnsupportedBundleSpecification`` is raised when the compression or
     bundle type/version is not recognized.

     Note: this function will likely eventually return a more complex data
     structure, including bundle2 part information.
     """
     def parseparams(s):
         if ';' not in s:
             return s, {}

         params = {}
         version, paramstr = s.split(';', 1)

         for p in paramstr.split(';'):
             if '=' not in p:
                 raise error.InvalidBundleSpecification(
                     _('invalid bundle specification: '
                       'missing "=" in parameter: %s') % p)

             key, value = p.split('=', 1)
             key = urlreq.unquote(key)
             value = urlreq.unquote(value)
             params[key] = value

         return version, params


     if strict and '-' not in spec:
         raise error.InvalidBundleSpecification(
                 _('invalid bundle specification; '
                   'must be prefixed with compression: %s') % spec)

     if '-' in spec:
         compression, version = spec.split('-', 1)

         if compression not in util.compengines.supportedbundlenames:
             raise error.UnsupportedBundleSpecification(
                     _('%s compression is not supported') % compression)

         version, params = parseparams(version)

         if version not in _bundlespeccgversions:
             raise error.UnsupportedBundleSpecification(
                     _('%s is not a recognized bundle version') % version)
     else:
         # Value could be just the compression or just the version, in which
         # case some defaults are assumed (but only when not in strict mode).
         assert not strict

         spec, params = parseparams(spec)

         if spec in util.compengines.supportedbundlenames:
             compression = spec
             version = 'v1'
             # Generaldelta repos require v2.
             if 'generaldelta' in repo.requirements:
                 version = 'v2'
             # Modern compression engines require v2.
             if compression not in _bundlespecv1compengines:
                 version = 'v2'
         elif spec in _bundlespeccgversions:
             if spec == 'packed1':
                 compression = 'none'
             else:
                 compression = 'bzip2'
             version = spec
         else:
             raise error.UnsupportedBundleSpecification(
                     _('%s is not a recognized bundle specification') % spec)

     # Bundle version 1 only supports a known set of compression engines.
     if version == 'v1' and compression not in _bundlespecv1compengines:
         raise error.UnsupportedBundleSpecification(
             _('compression engine %s is not supported on v1 bundles') %
             compression)

     # The specification for packed1 can optionally declare the data formats
     # required to apply it. If we see this metadata, compare against what the
     # repo supports and error if the bundle isn't compatible.
     if version == 'packed1' and 'requirements' in params:
         requirements = set(params['requirements'].split(','))
         missingreqs = requirements - repo.supportedformats
         if missingreqs:
             raise error.UnsupportedBundleSpecification(
                     _('missing support for repository features: %s') %
                       ', '.join(sorted(missingreqs)))

     # Compute contentopts based on the version
     contentopts = _bundlespeccontentopts.get(version, {}).copy()

     # Process the variants
     if "stream" in params and params["stream"] == "v2":
         variant = _bundlespecvariants["streamv2"]
         contentopts.update(variant)

     if not externalnames:
         engine = util.compengines.forbundlename(compression)
         compression = engine.bundletype()[1]
         version = _bundlespeccgversions[version]

     return bundlespec(compression, version, params, contentopts)

 def readbundle(ui, fh, fname, vfs=None):
     header = changegroup.readexactly(fh, 4)

     alg = None
     if not fname:
         fname = "stream"
         if not header.startswith('HG') and header.startswith('\0'):
             fh = changegroup.headerlessfixup(fh, header)
             header = "HG10"
             alg = 'UN'
     elif vfs:
         fname = vfs.join(fname)

     magic, version = header[0:2], header[2:4]

     if magic != 'HG':
         raise error.Abort(_('%s: not a Mercurial bundle') % fname)
     if version == '10':
         if alg is None:
             alg = changegroup.readexactly(fh, 2)
         return changegroup.cg1unpacker(fh, alg)
     elif version.startswith('2'):
         return bundle2.getunbundler(ui, fh, magicstring=magic + version)
     elif version == 'S1':
         return streamclone.streamcloneapplier(fh)
     else:
         raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))

 def getbundlespec(ui, fh):
     """Infer the bundlespec from a bundle file handle.

     The input file handle is seeked and the original seek position is not
     restored.
     """
     def speccompression(alg):
         try:
             return util.compengines.forbundletype(alg).bundletype()[0]
         except KeyError:
             return None

     b = readbundle(ui, fh, None)
     if isinstance(b, changegroup.cg1unpacker):
         alg = b._type
         if alg == '_truncatedBZ':
             alg = 'BZ'
         comp = speccompression(alg)
         if not comp:
             raise error.Abort(_('unknown compression algorithm: %s') % alg)
         return '%s-v1' % comp
     elif isinstance(b, bundle2.unbundle20):
         if 'Compression' in b.params:
             comp = speccompression(b.params['Compression'])
             if not comp:
                 raise error.Abort(_('unknown compression algorithm: %s') % comp)
         else:
             comp = 'none'

         version = None
         for part in b.iterparts():
             if part.type == 'changegroup':
                 version = part.params['version']
                 if version in ('01', '02'):
                     version = 'v2'
                 else:
                     raise error.Abort(_('changegroup version %s does not have '
                                         'a known bundlespec') % version,
                                       hint=_('try upgrading your Mercurial '
                                              'client'))
             elif part.type == 'stream2' and version is None:
                 # A stream2 part requires to be part of a v2 bundle
                 version = "v2"
                 requirements = urlreq.unquote(part.params['requirements'])
                 splitted = requirements.split()
                 params = bundle2._formatrequirementsparams(splitted)
                 return 'none-v2;stream=v2;%s' % params

         if not version:
             raise error.Abort(_('could not identify changegroup version in '
                                 'bundle'))

         return '%s-%s' % (comp, version)
     elif isinstance(b, streamclone.streamcloneapplier):
         requirements = streamclone.readbundle1header(fh)[2]
         formatted = bundle2._formatrequirementsparams(requirements)
         return 'none-packed1;%s' % formatted
     else:
         raise error.Abort(_('unknown bundle type: %s') % b)

 def _computeoutgoing(repo, heads, common):
     """Computes which revs are outgoing given a set of common
     and a set of heads.

     This is a separate function so extensions can have access to
     the logic.

     Returns a discovery.outgoing object.
     """
     cl = repo.changelog
     if common:
         hasnode = cl.hasnode
         common = [n for n in common if hasnode(n)]
     else:
         common = [nullid]
     if not heads:
         heads = cl.heads()
     return discovery.outgoing(repo, common, heads)

 def _forcebundle1(op):
     """return true if a pull/push must use bundle1

     This function is used to allow testing of the older bundle version"""
     ui = op.repo.ui
     # The goal is this config is to allow developer to choose the bundle
     # version used during exchanged. This is especially handy during test.
     # Value is a list of bundle version to be picked from, highest version
     # should be used.
     #
     # developer config: devel.legacy.exchange
     exchange = ui.configlist('devel', 'legacy.exchange')
     forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
     return forcebundle1 or not op.remote.capable('bundle2')

 class pushoperation(object):
     """A object that represent a single push operation

     Its purpose is to carry push related state and very common operations.

     A new pushoperation should be created at the beginning of each push and
     discarded afterward.
     """

     def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                  bookmarks=(), pushvars=None):
         # repo we push from
         self.repo = repo
         self.ui = repo.ui
         # repo we push to
         self.remote = remote
         # force option provided
         self.force = force
         # revs to be pushed (None is "all")
         self.revs = revs
         # bookmark explicitly pushed
         self.bookmarks = bookmarks
         # allow push of new branch
         self.newbranch = newbranch
         # step already performed
         # (used to check what steps have been already performed through bundle2)
         self.stepsdone = set()
         # Integer version of the changegroup push result
         # - None means nothing to push
         # - 0 means HTTP error
         # - 1 means we pushed and remote head count is unchanged *or*
         #   we have outgoing changesets but refused to push
         # - other values as described by addchangegroup()
         self.cgresult = None
         # Boolean value for the bookmark push
         self.bkresult = None
         # discover.outgoing object (contains common and outgoing data)
         self.outgoing = None
         # all remote topological heads before the push
         self.remoteheads = None
         # Details of the remote branch pre and post push
         #
         # mapping: {'branch': ([remoteheads],
         #                      [newheads],
         #                      [unsyncedheads],
         #                      [discardedheads])}
         # - branch: the branch name
         # - remoteheads: the list of remote heads known locally
         #                None if the branch is new
         # - newheads: the new remote heads (known locally) with outgoing pushed
         # - unsyncedheads: the list of remote heads unknown locally.
         # - discardedheads: the list of remote heads made obsolete by the push
         self.pushbranchmap = None
         # testable as a boolean indicating if any nodes are missing locally.
         self.incoming = None
         # summary of the remote phase situation
         self.remotephases = None
         # phases changes that must be pushed along side the changesets
         self.outdatedphases = None
         # phases changes that must be pushed if changeset push fails
         self.fallbackoutdatedphases = None
         # outgoing obsmarkers
         self.outobsmarkers = set()
         # outgoing bookmarks
         self.outbookmarks = []
         # transaction manager
         self.trmanager = None
         # map { pushkey partid -> callback handling failure}
         # used to handle exception from mandatory pushkey part failure
         self.pkfailcb = {}
         # an iterable of pushvars or None
         self.pushvars = pushvars

     @util.propertycache
     def futureheads(self):
         """future remote heads if the changeset push succeeds"""
         return self.outgoing.missingheads

     @util.propertycache
     def fallbackheads(self):
         """future remote heads if the changeset push fails"""
         if self.revs is None:
             # not target to push, all common are relevant
             return self.outgoing.commonheads
         unfi = self.repo.unfiltered()
         # I want cheads = heads(::missingheads and ::commonheads)
         # (missingheads is revs with secret changeset filtered out)
         #
         # This can be expressed as:
         #     cheads = ( (missingheads and ::commonheads)
         #              + (commonheads and ::missingheads))"
         #              )
         #
         # while trying to push we already computed the following:
         #     common = (::commonheads)
         #     missing = ((commonheads::missingheads) - commonheads)
         #
         # We can pick:
         # * missingheads part of common (::commonheads)
         common = self.outgoing.common
         nm = self.repo.changelog.nodemap
         cheads = [node for node in self.revs if nm[node] in common]
         # and
         # * commonheads parents on missing
         revset = unfi.set('%ln and parents(roots(%ln))',
                           self.outgoing.commonheads,
                           self.outgoing.missing)
         cheads.extend(c.node() for c in revset)
         return cheads

     @property
     def commonheads(self):
         """set of all common heads after changeset bundle push"""
         if self.cgresult:
             return self.futureheads
         else:
             return self.fallbackheads

 # mapping of message used when pushing bookmark
 bookmsgmap = {'update': (_("updating bookmark %s\n"),
                          _('updating bookmark %s failed!\n')),
               'export': (_("exporting bookmark %s\n"),
                          _('exporting bookmark %s failed!\n')),
               'delete': (_("deleting remote bookmark %s\n"),
                          _('deleting remote bookmark %s failed!\n')),
               }


 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
          opargs=None):
     '''Push outgoing changesets (limited by revs) from a local
     repository to remote. Return an integer:
       - None means nothing to push
       - 0 means HTTP error
       - 1 means we pushed and remote head count is unchanged *or*
         we have outgoing changesets but refused to push
       - other values as described by addchangegroup()
     '''
     if opargs is None:
         opargs = {}
     pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                            **pycompat.strkwargs(opargs))
     if pushop.remote.local():
         missing = (set(pushop.repo.requirements)
                    - pushop.remote.local().supported)
         if missing:
             msg = _("required features are not"
                     " supported in the destination:"
                     " %s") % (', '.join(sorted(missing)))
             raise error.Abort(msg)

     if not pushop.remote.canpush():
         raise error.Abort(_("destination does not support push"))

     if not pushop.remote.capable('unbundle'):
         raise error.Abort(_('cannot push: destination does not support the '
                             'unbundle wire protocol command'))

     # get lock as we might write phase data
     wlock = lock = None
     try:
         # bundle2 push may receive a reply bundle touching bookmarks or other
         # things requiring the wlock. Take it now to ensure proper ordering.
         maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
         if (not _forcebundle1(pushop)) and maypushback:
             wlock = pushop.repo.wlock()
         lock = pushop.repo.lock()
         pushop.trmanager = transactionmanager(pushop.repo,
                                               'push-response',
                                               pushop.remote.url())
     except IOError as err:
         if err.errno != errno.EACCES:
             raise
         # source repo cannot be locked.
         # We do not abort the push, but just disable the local phase
         # synchronisation.
         msg = 'cannot lock source repository: %s\n' % err
         pushop.ui.debug(msg)

     with wlock or util.nullcontextmanager(), \
             lock or util.nullcontextmanager(), \
             pushop.trmanager or util.nullcontextmanager():
         pushop.repo.checkpush(pushop)
         _pushdiscovery(pushop)
         if not _forcebundle1(pushop):
             _pushbundle2(pushop)
         _pushchangeset(pushop)
         _pushsyncphase(pushop)
         _pushobsolete(pushop)
         _pushbookmark(pushop)

     return pushop

 # list of steps to perform discovery before push
 pushdiscoveryorder = []

 # Mapping between step name and function
 #
 # This exists to help extensions wrap steps if necessary
 pushdiscoverymapping = {}

 def pushdiscovery(stepname):
     """decorator for function performing discovery before push

     The function is added to the step -> function mapping and appended to the
     list of steps. Beware that decorated function will be added in order (this
     may matter).

     You can only use this decorator for a new step, if you want to wrap a step
     from an extension, change the pushdiscovery dictionary directly."""
     def dec(func):
         assert stepname not in pushdiscoverymapping
         pushdiscoverymapping[stepname] = func
         pushdiscoveryorder.append(stepname)
         return func
     return dec

 def _pushdiscovery(pushop):
     """Run all discovery steps"""
     for stepname in pushdiscoveryorder:
         step = pushdiscoverymapping[stepname]
         step(pushop)

 @pushdiscovery('changeset')
 def _pushdiscoverychangeset(pushop):
     """discover the changeset that need to be pushed"""
     fci = discovery.findcommonincoming
     if pushop.revs:
         commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
                         ancestorsof=pushop.revs)
     else:
         commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
     common, inc, remoteheads = commoninc
     fco = discovery.findcommonoutgoing
     outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
                    commoninc=commoninc, force=pushop.force)
     pushop.outgoing = outgoing
     pushop.remoteheads = remoteheads
     pushop.incoming = inc

 @pushdiscovery('phase')
 def _pushdiscoveryphase(pushop):
     """discover the phase that needs to be pushed

     (computed for both success and failure case for changesets push)"""
     outgoing = pushop.outgoing
     unfi = pushop.repo.unfiltered()
-    remotephases = pushop.remote.listkeys('phases')
+    remotephases = listkeys(pushop.remote, 'phases')
+
     if (pushop.ui.configbool('ui', '_usedassubrepo')
         and remotephases # server supports phases
         and not pushop.outgoing.missing # no changesets to be pushed
         and remotephases.get('publishing', False)):
         # When:
         # - this is a subrepo push
         # - and remote support phase
         # - and no changeset are to be pushed
         # - and remote is publishing
         # We may be in issue 3781 case!
         # We drop the possible phase synchronisation done by
         # courtesy to publish changesets possibly locally draft
         # on the remote.
         pushop.outdatedphases = []
         pushop.fallbackoutdatedphases = []
         return

     pushop.remotephases = phases.remotephasessummary(pushop.repo,
                                                      pushop.fallbackheads,
                                                      remotephases)
     droots = pushop.remotephases.draftroots

     extracond = ''
     if not pushop.remotephases.publishing:
         extracond = ' and public()'
     revset = 'heads((%%ln::%%ln) %s)' % extracond
     # Get the list of all revs draft on remote by public here.
     # XXX Beware that revset break if droots is not strictly
     # XXX root we may want to ensure it is but it is costly
     fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
     if not outgoing.missing:
         future = fallback
     else:
         # adds changeset we are going to push as draft
         #
         # should not be necessary for publishing server, but because of an
         # issue fixed in xxxxx we have to do it anyway.
         fdroots = list(unfi.set('roots(%ln + %ln::)',
                        outgoing.missing, droots))
         fdroots = [f.node() for f in fdroots]
         future = list(unfi.set(revset, fdroots, pushop.futureheads))
     pushop.outdatedphases = future
     pushop.fallbackoutdatedphases = fallback

 @pushdiscovery('obsmarker')
 def _pushdiscoveryobsmarkers(pushop):
-    if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
-        and pushop.repo.obsstore
-        and 'obsolete' in pushop.remote.listkeys('namespaces')):
-        repo = pushop.repo
-        # very naive computation, that can be quite expensive on big repo.
-        # However: evolution is currently slow on them anyway.
-        nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
-        pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
+    if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
+        return
+
+    if not pushop.repo.obsstore:
+        return
+
+    if 'obsolete' not in listkeys(pushop.remote, 'namespaces'):
+        return
+
+    repo = pushop.repo
+    # very naive computation, that can be quite expensive on big repo.
+    # However: evolution is currently slow on them anyway.
+    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
+    pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)

 @pushdiscovery('bookmarks')
 def _pushdiscoverybookmarks(pushop):
     ui = pushop.ui
     repo = pushop.repo.unfiltered()
     remote = pushop.remote
     ui.debug("checking for updated bookmarks\n")
     ancestors = ()
     if pushop.revs:
         revnums = map(repo.changelog.rev, pushop.revs)
         ancestors = repo.changelog.ancestors(revnums, inclusive=True)
-    remotebookmark = remote.listkeys('bookmarks')
+
+    remotebookmark = listkeys(remote, 'bookmarks')

     explicit = set([repo._bookmarks.expandname(bookmark)
                     for bookmark in pushop.bookmarks])

     remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
     comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

     def safehex(x):
         if x is None:
             return x
         return hex(x)

     def hexifycompbookmarks(bookmarks):
         return [(b, safehex(scid), safehex(dcid))
                 for (b, scid, dcid) in bookmarks]

     comp = [hexifycompbookmarks(marks) for marks in comp]
     return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)

 def _processcompared(pushop, pushed, explicit, remotebms, comp):
     """take decision on bookmark to pull from the remote bookmark

     Exist to help extensions who want to alter this behavior.
     """
     addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

     repo = pushop.repo

     for b, scid, dcid in advsrc:
         if b in explicit:
             explicit.remove(b)
         if not pushed or repo[scid].rev() in pushed:
             pushop.outbookmarks.append((b, dcid, scid))
     # search added bookmark
     for b, scid, dcid in addsrc:
         if b in explicit:
             explicit.remove(b)
             pushop.outbookmarks.append((b, '', scid))
     # search for overwritten bookmark
     for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
         if b in explicit:
             explicit.remove(b)
             pushop.outbookmarks.append((b, dcid, scid))
     # search for bookmark to delete
     for b, scid, dcid in adddst:
         if b in explicit:
             explicit.remove(b)
             # treat as "deleted locally"
             pushop.outbookmarks.append((b, dcid, ''))
     # identical bookmarks shouldn't get reported
     for b, scid, dcid in same:
         if b in explicit:
             explicit.remove(b)

     if explicit:
         explicit = sorted(explicit)
         # we should probably list all of them
         pushop.ui.warn(_('bookmark %s does not exist on the local '
                          'or remote repository!\n') % explicit[0])
         pushop.bkresult = 2

     pushop.outbookmarks.sort()

 def _pushcheckoutgoing(pushop):
     outgoing = pushop.outgoing
     unfi = pushop.repo.unfiltered()
     if not outgoing.missing:
         # nothing to push
         scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
         return False
     # something to push
     if not pushop.force:
         # if repo.obsstore == False --> no obsolete
         # then, save the iteration
         if unfi.obsstore:
             # this message are here for 80 char limit reason
             mso = _("push includes obsolete changeset: %s!")
             mspd = _("push includes phase-divergent changeset: %s!")
             mscd = _("push includes content-divergent changeset: %s!")
             mst = {"orphan": _("push includes orphan changeset: %s!"),
                    "phase-divergent": mspd,
                    "content-divergent": mscd}
             # If we are to push if there is at least one
             # obsolete or unstable changeset in missing, at
             # least one of the missinghead will be obsolete or
             # unstable. So checking heads only is ok
             for node in outgoing.missingheads:
                 ctx = unfi[node]
                 if ctx.obsolete():
                     raise error.Abort(mso % ctx)
                 elif ctx.isunstable():
                     # TODO print more than one instability in the abort
                     # message
                     raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

     discovery.checkheads(pushop)
     return True

 # List of names of steps to perform for an outgoing bundle2, order matters.
 b2partsgenorder = []

 # Mapping between step name and function
 #
 # This exists to help extensions wrap steps if necessary
 b2partsgenmapping = {}

 def b2partsgenerator(stepname, idx=None):
     """decorator for function generating bundle2 part

     The function is added to the step -> function mapping and appended to the
     list of steps. Beware that decorated functions will be added in order
     (this may matter).

     You can only use this decorator for new steps, if you want to wrap a step
     from an extension, attack the b2partsgenmapping dictionary directly."""
     def dec(func):
         assert stepname not in b2partsgenmapping
         b2partsgenmapping[stepname] = func
         if idx is None:
             b2partsgenorder.append(stepname)
         else:
             b2partsgenorder.insert(idx, stepname)
         return func
     return dec

 def _pushb2ctxcheckheads(pushop, bundler):
     """Generate race condition checking parts

     Exists as an independent function to aid extensions
     """
     # * 'force' do not check for push race,
     # * if we don't push anything, there are nothing to check.
     if not pushop.force and pushop.outgoing.missingheads:
         allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
         emptyremote = pushop.pushbranchmap is None
         if not allowunrelated or emptyremote:
             bundler.newpart('check:heads', data=iter(pushop.remoteheads))
         else:
             affected = set()
             for branch, heads in pushop.pushbranchmap.iteritems():
                 remoteheads, newheads, unsyncedheads, discardedheads = heads
                 if remoteheads is not None:
                     remote = set(remoteheads)
                     affected |= set(discardedheads) & remote
                     affected |= remote - set(newheads)
             if affected:
                 data = iter(sorted(affected))
                 bundler.newpart('check:updated-heads', data=data)

 def _pushing(pushop):
     """return True if we are pushing anything"""
     return bool(pushop.outgoing.missing
                 or pushop.outdatedphases
                 or pushop.outobsmarkers
                 or pushop.outbookmarks)

 @b2partsgenerator('check-bookmarks')
 def _pushb2checkbookmarks(pushop, bundler):
     """insert bookmark move checking"""
     if not _pushing(pushop) or pushop.force:
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
     hasbookmarkcheck = 'bookmarks' in b2caps
     if not (pushop.outbookmarks and hasbookmarkcheck):
         return
     data = []
     for book, old, new in pushop.outbookmarks:
         old = bin(old)
         data.append((book, old))
     checkdata = bookmod.binaryencode(data)
     bundler.newpart('check:bookmarks', data=checkdata)

 @b2partsgenerator('check-phases')
 def _pushb2checkphases(pushop, bundler):
     """insert phase move checking"""
     if not _pushing(pushop) or pushop.force:
         return
     b2caps = bundle2.bundle2caps(pushop.remote)
     hasphaseheads = 'heads' in b2caps.get('phases', ())
     if pushop.remotephases is not None and hasphaseheads:
         # check that the remote phase has not changed
         checks = [[] for p in phases.allphases]
         checks[phases.public].extend(pushop.remotephases.publicheads)
         checks[phases.draft].extend(pushop.remotephases.draftroots)
         if any(checks):
             for nodes in checks:
                 nodes.sort()
             checkdata = phases.binaryencode(checks)
             bundler.newpart('check:phases', data=checkdata)

 @b2partsgenerator('changeset')
 def _pushb2ctx(pushop, bundler):
     """handle changegroup push through bundle2

     addchangegroup result is stored in the ``pushop.cgresult`` attribute.
     """
     if 'changesets' in pushop.stepsdone:
         return
     pushop.stepsdone.add('changesets')
     # Send known heads to the server for race detection.
     if not _pushcheckoutgoing(pushop):
         return
     pushop.repo.prepushoutgoinghooks(pushop)

     _pushb2ctxcheckheads(pushop, bundler)

     b2caps = bundle2.bundle2caps(pushop.remote)
     version = '01'
     cgversions = b2caps.get('changegroup')
     if cgversions: # 3.1 and 3.2 ship with an empty value
         cgversions = [v for v in cgversions
                       if v in changegroup.supportedoutgoingversions(
                           pushop.repo)]
         if not cgversions:
             raise ValueError(_('no common changegroup version'))
         version = max(cgversions)
     cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                       'push')
     cgpart = bundler.newpart('changegroup', data=cgstream)
     if cgversions:
         cgpart.addparam('version', version)
     if 'treemanifest' in pushop.repo.requirements:
         cgpart.addparam('treemanifest', '1')
     def handlereply(op):
         """extract addchangegroup returns from server reply"""
886 cgreplies = op.records.getreplies(cgpart.id)
894 cgreplies = op.records.getreplies(cgpart.id)
887 assert len(cgreplies['changegroup']) == 1
895 assert len(cgreplies['changegroup']) == 1
888 pushop.cgresult = cgreplies['changegroup'][0]['return']
896 pushop.cgresult = cgreplies['changegroup'][0]['return']
889 return handlereply
897 return handlereply
890
898
891 @b2partsgenerator('phase')
899 @b2partsgenerator('phase')
892 def _pushb2phases(pushop, bundler):
900 def _pushb2phases(pushop, bundler):
893 """handle phase push through bundle2"""
901 """handle phase push through bundle2"""
894 if 'phases' in pushop.stepsdone:
902 if 'phases' in pushop.stepsdone:
895 return
903 return
896 b2caps = bundle2.bundle2caps(pushop.remote)
904 b2caps = bundle2.bundle2caps(pushop.remote)
897 ui = pushop.repo.ui
905 ui = pushop.repo.ui
898
906
899 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
907 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
900 haspushkey = 'pushkey' in b2caps
908 haspushkey = 'pushkey' in b2caps
901 hasphaseheads = 'heads' in b2caps.get('phases', ())
909 hasphaseheads = 'heads' in b2caps.get('phases', ())
902
910
903 if hasphaseheads and not legacyphase:
911 if hasphaseheads and not legacyphase:
904 return _pushb2phaseheads(pushop, bundler)
912 return _pushb2phaseheads(pushop, bundler)
905 elif haspushkey:
913 elif haspushkey:
906 return _pushb2phasespushkey(pushop, bundler)
914 return _pushb2phasespushkey(pushop, bundler)
907
915
908 def _pushb2phaseheads(pushop, bundler):
916 def _pushb2phaseheads(pushop, bundler):
909 """push phase information through a bundle2 - binary part"""
917 """push phase information through a bundle2 - binary part"""
910 pushop.stepsdone.add('phases')
918 pushop.stepsdone.add('phases')
911 if pushop.outdatedphases:
919 if pushop.outdatedphases:
912 updates = [[] for p in phases.allphases]
920 updates = [[] for p in phases.allphases]
913 updates[0].extend(h.node() for h in pushop.outdatedphases)
921 updates[0].extend(h.node() for h in pushop.outdatedphases)
914 phasedata = phases.binaryencode(updates)
922 phasedata = phases.binaryencode(updates)
915 bundler.newpart('phase-heads', data=phasedata)
923 bundler.newpart('phase-heads', data=phasedata)
916
924
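# Sketch of the resulting payload, assuming the phases.binaryencode format
# of this release: a flat sequence of <4-byte big-endian phase><20-byte
# node> records, one per head being published.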
917 def _pushb2phasespushkey(pushop, bundler):
925 def _pushb2phasespushkey(pushop, bundler):
918 """push phase information through a bundle2 - pushkey part"""
926 """push phase information through a bundle2 - pushkey part"""
919 pushop.stepsdone.add('phases')
927 pushop.stepsdone.add('phases')
920 part2node = []
928 part2node = []
921
929
922 def handlefailure(pushop, exc):
930 def handlefailure(pushop, exc):
923 targetid = int(exc.partid)
931 targetid = int(exc.partid)
924 for partid, node in part2node:
932 for partid, node in part2node:
925 if partid == targetid:
933 if partid == targetid:
926 raise error.Abort(_('updating %s to public failed') % node)
934 raise error.Abort(_('updating %s to public failed') % node)
927
935
928 enc = pushkey.encode
936 enc = pushkey.encode
929 for newremotehead in pushop.outdatedphases:
937 for newremotehead in pushop.outdatedphases:
930 part = bundler.newpart('pushkey')
938 part = bundler.newpart('pushkey')
931 part.addparam('namespace', enc('phases'))
939 part.addparam('namespace', enc('phases'))
932 part.addparam('key', enc(newremotehead.hex()))
940 part.addparam('key', enc(newremotehead.hex()))
933 part.addparam('old', enc('%d' % phases.draft))
941 part.addparam('old', enc('%d' % phases.draft))
934 part.addparam('new', enc('%d' % phases.public))
942 part.addparam('new', enc('%d' % phases.public))
935 part2node.append((part.id, newremotehead))
943 part2node.append((part.id, newremotehead))
936 pushop.pkfailcb[part.id] = handlefailure
944 pushop.pkfailcb[part.id] = handlefailure
937
945
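# Illustration: since phases.public == 0 and phases.draft == 1, each part
# above asks the remote to move one head's phase key from '1' to '0'.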
938 def handlereply(op):
946 def handlereply(op):
939 for partid, node in part2node:
947 for partid, node in part2node:
940 partrep = op.records.getreplies(partid)
948 partrep = op.records.getreplies(partid)
941 results = partrep['pushkey']
949 results = partrep['pushkey']
942 assert len(results) <= 1
950 assert len(results) <= 1
943 msg = None
951 msg = None
944 if not results:
952 if not results:
945 msg = _('server ignored update of %s to public!\n') % node
953 msg = _('server ignored update of %s to public!\n') % node
946 elif not int(results[0]['return']):
954 elif not int(results[0]['return']):
947 msg = _('updating %s to public failed!\n') % node
955 msg = _('updating %s to public failed!\n') % node
948 if msg is not None:
956 if msg is not None:
949 pushop.ui.warn(msg)
957 pushop.ui.warn(msg)
950 return handlereply
958 return handlereply
951
959
952 @b2partsgenerator('obsmarkers')
960 @b2partsgenerator('obsmarkers')
953 def _pushb2obsmarkers(pushop, bundler):
961 def _pushb2obsmarkers(pushop, bundler):
954 if 'obsmarkers' in pushop.stepsdone:
962 if 'obsmarkers' in pushop.stepsdone:
955 return
963 return
956 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
964 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
957 if obsolete.commonversion(remoteversions) is None:
965 if obsolete.commonversion(remoteversions) is None:
958 return
966 return
959 pushop.stepsdone.add('obsmarkers')
967 pushop.stepsdone.add('obsmarkers')
960 if pushop.outobsmarkers:
968 if pushop.outobsmarkers:
961 markers = sorted(pushop.outobsmarkers)
969 markers = sorted(pushop.outobsmarkers)
962 bundle2.buildobsmarkerspart(bundler, markers)
970 bundle2.buildobsmarkerspart(bundler, markers)
963
971
964 @b2partsgenerator('bookmarks')
972 @b2partsgenerator('bookmarks')
965 def _pushb2bookmarks(pushop, bundler):
973 def _pushb2bookmarks(pushop, bundler):
966 """handle bookmark push through bundle2"""
974 """handle bookmark push through bundle2"""
967 if 'bookmarks' in pushop.stepsdone:
975 if 'bookmarks' in pushop.stepsdone:
968 return
976 return
969 b2caps = bundle2.bundle2caps(pushop.remote)
977 b2caps = bundle2.bundle2caps(pushop.remote)
970
978
971 legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
979 legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
972 legacybooks = 'bookmarks' in legacy
980 legacybooks = 'bookmarks' in legacy
973
981
974 if not legacybooks and 'bookmarks' in b2caps:
982 if not legacybooks and 'bookmarks' in b2caps:
975 return _pushb2bookmarkspart(pushop, bundler)
983 return _pushb2bookmarkspart(pushop, bundler)
976 elif 'pushkey' in b2caps:
984 elif 'pushkey' in b2caps:
977 return _pushb2bookmarkspushkey(pushop, bundler)
985 return _pushb2bookmarkspushkey(pushop, bundler)
978
986
979 def _bmaction(old, new):
987 def _bmaction(old, new):
980 """small utility for bookmark pushing"""
988 """small utility for bookmark pushing"""
981 if not old:
989 if not old:
982 return 'export'
990 return 'export'
983 elif not new:
991 elif not new:
984 return 'delete'
992 return 'delete'
985 return 'update'
993 return 'update'
986
994
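# For example: _bmaction('', hexnode) -> 'export' (new bookmark),
# _bmaction(hexnode, '') -> 'delete', _bmaction(hexnode, hexnode) -> 'update'.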
987 def _pushb2bookmarkspart(pushop, bundler):
995 def _pushb2bookmarkspart(pushop, bundler):
988 pushop.stepsdone.add('bookmarks')
996 pushop.stepsdone.add('bookmarks')
989 if not pushop.outbookmarks:
997 if not pushop.outbookmarks:
990 return
998 return
991
999
992 allactions = []
1000 allactions = []
993 data = []
1001 data = []
994 for book, old, new in pushop.outbookmarks:
1002 for book, old, new in pushop.outbookmarks:
995 new = bin(new)
1003 new = bin(new)
996 data.append((book, new))
1004 data.append((book, new))
997 allactions.append((book, _bmaction(old, new)))
1005 allactions.append((book, _bmaction(old, new)))
998 checkdata = bookmod.binaryencode(data)
1006 checkdata = bookmod.binaryencode(data)
999 bundler.newpart('bookmarks', data=checkdata)
1007 bundler.newpart('bookmarks', data=checkdata)
1000
1008
1001 def handlereply(op):
1009 def handlereply(op):
1002 ui = pushop.ui
1010 ui = pushop.ui
1003 # if success
1011 # if success
1004 for book, action in allactions:
1012 for book, action in allactions:
1005 ui.status(bookmsgmap[action][0] % book)
1013 ui.status(bookmsgmap[action][0] % book)
1006
1014
1007 return handlereply
1015 return handlereply
1008
1016
1009 def _pushb2bookmarkspushkey(pushop, bundler):
1017 def _pushb2bookmarkspushkey(pushop, bundler):
1010 pushop.stepsdone.add('bookmarks')
1018 pushop.stepsdone.add('bookmarks')
1011 part2book = []
1019 part2book = []
1012 enc = pushkey.encode
1020 enc = pushkey.encode
1013
1021
1014 def handlefailure(pushop, exc):
1022 def handlefailure(pushop, exc):
1015 targetid = int(exc.partid)
1023 targetid = int(exc.partid)
1016 for partid, book, action in part2book:
1024 for partid, book, action in part2book:
1017 if partid == targetid:
1025 if partid == targetid:
1018 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1026 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1019 # we should not be called for a part we did not generate
1027 # we should not be called for a part we did not generate
1020 assert False
1028 assert False
1021
1029
1022 for book, old, new in pushop.outbookmarks:
1030 for book, old, new in pushop.outbookmarks:
1023 part = bundler.newpart('pushkey')
1031 part = bundler.newpart('pushkey')
1024 part.addparam('namespace', enc('bookmarks'))
1032 part.addparam('namespace', enc('bookmarks'))
1025 part.addparam('key', enc(book))
1033 part.addparam('key', enc(book))
1026 part.addparam('old', enc(old))
1034 part.addparam('old', enc(old))
1027 part.addparam('new', enc(new))
1035 part.addparam('new', enc(new))
1028 action = 'update'
1036 action = 'update'
1029 if not old:
1037 if not old:
1030 action = 'export'
1038 action = 'export'
1031 elif not new:
1039 elif not new:
1032 action = 'delete'
1040 action = 'delete'
1033 part2book.append((part.id, book, action))
1041 part2book.append((part.id, book, action))
1034 pushop.pkfailcb[part.id] = handlefailure
1042 pushop.pkfailcb[part.id] = handlefailure
1035
1043
1036 def handlereply(op):
1044 def handlereply(op):
1037 ui = pushop.ui
1045 ui = pushop.ui
1038 for partid, book, action in part2book:
1046 for partid, book, action in part2book:
1039 partrep = op.records.getreplies(partid)
1047 partrep = op.records.getreplies(partid)
1040 results = partrep['pushkey']
1048 results = partrep['pushkey']
1041 assert len(results) <= 1
1049 assert len(results) <= 1
1042 if not results:
1050 if not results:
1043 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
1051 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
1044 else:
1052 else:
1045 ret = int(results[0]['return'])
1053 ret = int(results[0]['return'])
1046 if ret:
1054 if ret:
1047 ui.status(bookmsgmap[action][0] % book)
1055 ui.status(bookmsgmap[action][0] % book)
1048 else:
1056 else:
1049 ui.warn(bookmsgmap[action][1] % book)
1057 ui.warn(bookmsgmap[action][1] % book)
1050 if pushop.bkresult is not None:
1058 if pushop.bkresult is not None:
1051 pushop.bkresult = 1
1059 pushop.bkresult = 1
1052 return handlereply
1060 return handlereply
1053
1061
1054 @b2partsgenerator('pushvars', idx=0)
1062 @b2partsgenerator('pushvars', idx=0)
1055 def _getbundlesendvars(pushop, bundler):
1063 def _getbundlesendvars(pushop, bundler):
1056 '''send shellvars via bundle2'''
1064 '''send shellvars via bundle2'''
1057 pushvars = pushop.pushvars
1065 pushvars = pushop.pushvars
1058 if pushvars:
1066 if pushvars:
1059 shellvars = {}
1067 shellvars = {}
1060 for raw in pushvars:
1068 for raw in pushvars:
1061 if '=' not in raw:
1069 if '=' not in raw:
1062 msg = ("unable to parse variable '%s', should follow "
1070 msg = ("unable to parse variable '%s', should follow "
1063 "'KEY=VALUE' or 'KEY=' format")
1071 "'KEY=VALUE' or 'KEY=' format")
1064 raise error.Abort(msg % raw)
1072 raise error.Abort(msg % raw)
1065 k, v = raw.split('=', 1)
1073 k, v = raw.split('=', 1)
1066 shellvars[k] = v
1074 shellvars[k] = v
1067
1075
1068 part = bundler.newpart('pushvars')
1076 part = bundler.newpart('pushvars')
1069
1077
1070 for key, value in shellvars.iteritems():
1078 for key, value in shellvars.iteritems():
1071 part.addparam(key, value, mandatory=False)
1079 part.addparam(key, value, mandatory=False)
1072
1080
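# Usage sketch (hypothetical values): "hg push --pushvars DEBUG=1" arrives
# here as pushvars = ['DEBUG=1'] and produces a 'pushvars' part carrying
# the advisory parameter DEBUG=1, which the pushvars feature exposes to
# server-side hooks (as HG_USERVAR_* environment variables).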
1073 def _pushbundle2(pushop):
1081 def _pushbundle2(pushop):
1074 """push data to the remote using bundle2
1082 """push data to the remote using bundle2
1075
1083
1076 The only currently supported type of data is changegroup but this will
1084 The only currently supported type of data is changegroup but this will
1077 evolve in the future."""
1085 evolve in the future."""
1078 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1086 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1079 pushback = (pushop.trmanager
1087 pushback = (pushop.trmanager
1080 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
1088 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
1081
1089
1082 # create reply capability
1090 # create reply capability
1083 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
1091 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
1084 allowpushback=pushback,
1092 allowpushback=pushback,
1085 role='client'))
1093 role='client'))
1086 bundler.newpart('replycaps', data=capsblob)
1094 bundler.newpart('replycaps', data=capsblob)
1087 replyhandlers = []
1095 replyhandlers = []
1088 for partgenname in b2partsgenorder:
1096 for partgenname in b2partsgenorder:
1089 partgen = b2partsgenmapping[partgenname]
1097 partgen = b2partsgenmapping[partgenname]
1090 ret = partgen(pushop, bundler)
1098 ret = partgen(pushop, bundler)
1091 if callable(ret):
1099 if callable(ret):
1092 replyhandlers.append(ret)
1100 replyhandlers.append(ret)
1093 # do not push if nothing to push
1101 # do not push if nothing to push
1094 if bundler.nbparts <= 1:
1102 if bundler.nbparts <= 1:
1095 return
1103 return
1096 stream = util.chunkbuffer(bundler.getchunks())
1104 stream = util.chunkbuffer(bundler.getchunks())
1097 try:
1105 try:
1098 try:
1106 try:
1099 with pushop.remote.commandexecutor() as e:
1107 with pushop.remote.commandexecutor() as e:
1100 reply = e.callcommand('unbundle', {
1108 reply = e.callcommand('unbundle', {
1101 'bundle': stream,
1109 'bundle': stream,
1102 'heads': ['force'],
1110 'heads': ['force'],
1103 'url': pushop.remote.url(),
1111 'url': pushop.remote.url(),
1104 }).result()
1112 }).result()
1105 except error.BundleValueError as exc:
1113 except error.BundleValueError as exc:
1106 raise error.Abort(_('missing support for %s') % exc)
1114 raise error.Abort(_('missing support for %s') % exc)
1107 try:
1115 try:
1108 trgetter = None
1116 trgetter = None
1109 if pushback:
1117 if pushback:
1110 trgetter = pushop.trmanager.transaction
1118 trgetter = pushop.trmanager.transaction
1111 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1119 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1112 except error.BundleValueError as exc:
1120 except error.BundleValueError as exc:
1113 raise error.Abort(_('missing support for %s') % exc)
1121 raise error.Abort(_('missing support for %s') % exc)
1114 except bundle2.AbortFromPart as exc:
1122 except bundle2.AbortFromPart as exc:
1115 pushop.ui.status(_('remote: %s\n') % exc)
1123 pushop.ui.status(_('remote: %s\n') % exc)
1116 if exc.hint is not None:
1124 if exc.hint is not None:
1117 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
1125 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
1118 raise error.Abort(_('push failed on remote'))
1126 raise error.Abort(_('push failed on remote'))
1119 except error.PushkeyFailed as exc:
1127 except error.PushkeyFailed as exc:
1120 partid = int(exc.partid)
1128 partid = int(exc.partid)
1121 if partid not in pushop.pkfailcb:
1129 if partid not in pushop.pkfailcb:
1122 raise
1130 raise
1123 pushop.pkfailcb[partid](pushop, exc)
1131 pushop.pkfailcb[partid](pushop, exc)
1124 for rephand in replyhandlers:
1132 for rephand in replyhandlers:
1125 rephand(op)
1133 rephand(op)
1126
1134
1127 def _pushchangeset(pushop):
1135 def _pushchangeset(pushop):
1128 """Make the actual push of changeset bundle to remote repo"""
1136 """Make the actual push of changeset bundle to remote repo"""
1129 if 'changesets' in pushop.stepsdone:
1137 if 'changesets' in pushop.stepsdone:
1130 return
1138 return
1131 pushop.stepsdone.add('changesets')
1139 pushop.stepsdone.add('changesets')
1132 if not _pushcheckoutgoing(pushop):
1140 if not _pushcheckoutgoing(pushop):
1133 return
1141 return
1134
1142
1135 # Should have verified this in push().
1143 # Should have verified this in push().
1136 assert pushop.remote.capable('unbundle')
1144 assert pushop.remote.capable('unbundle')
1137
1145
1138 pushop.repo.prepushoutgoinghooks(pushop)
1146 pushop.repo.prepushoutgoinghooks(pushop)
1139 outgoing = pushop.outgoing
1147 outgoing = pushop.outgoing
1140 # TODO: get bundlecaps from remote
1148 # TODO: get bundlecaps from remote
1141 bundlecaps = None
1149 bundlecaps = None
1142 # create a changegroup from local
1150 # create a changegroup from local
1143 if pushop.revs is None and not (outgoing.excluded
1151 if pushop.revs is None and not (outgoing.excluded
1144 or pushop.repo.changelog.filteredrevs):
1152 or pushop.repo.changelog.filteredrevs):
1145 # push everything,
1153 # push everything,
1146 # use the fast path, no race possible on push
1154 # use the fast path, no race possible on push
1147 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
1155 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
1148 fastpath=True, bundlecaps=bundlecaps)
1156 fastpath=True, bundlecaps=bundlecaps)
1149 else:
1157 else:
1150 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
1158 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
1151 'push', bundlecaps=bundlecaps)
1159 'push', bundlecaps=bundlecaps)
1152
1160
1153 # apply changegroup to remote
1161 # apply changegroup to remote
1154 # local repo finds heads on server, finds out what
1162 # local repo finds heads on server, finds out what
1155 # revs it must push. once revs transferred, if server
1163 # revs it must push. once revs transferred, if server
1156 # finds it has different heads (someone else won
1164 # finds it has different heads (someone else won
1157 # commit/push race), server aborts.
1165 # commit/push race), server aborts.
1158 if pushop.force:
1166 if pushop.force:
1159 remoteheads = ['force']
1167 remoteheads = ['force']
1160 else:
1168 else:
1161 remoteheads = pushop.remoteheads
1169 remoteheads = pushop.remoteheads
1162 # ssh: return remote's addchangegroup()
1170 # ssh: return remote's addchangegroup()
1163 # http: return remote's addchangegroup() or 0 for error
1171 # http: return remote's addchangegroup() or 0 for error
1164 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
1172 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
1165 pushop.repo.url())
1173 pushop.repo.url())
1166
1174
1167 def _pushsyncphase(pushop):
1175 def _pushsyncphase(pushop):
1168 """synchronise phase information locally and remotely"""
1176 """synchronise phase information locally and remotely"""
1169 cheads = pushop.commonheads
1177 cheads = pushop.commonheads
1170 # even when we don't push, exchanging phase data is useful
1178 # even when we don't push, exchanging phase data is useful
1171 remotephases = pushop.remote.listkeys('phases')
1179 remotephases = listkeys(pushop.remote, 'phases')
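# (Illustration of the expected mapping: a publishing server typically
# answers {'publishing': 'True'}, while a non-publishing one returns its
# hex draft roots mapped to '1'.)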
1172 if (pushop.ui.configbool('ui', '_usedassubrepo')
1180 if (pushop.ui.configbool('ui', '_usedassubrepo')
1173 and remotephases # server supports phases
1181 and remotephases # server supports phases
1174 and pushop.cgresult is None # nothing was pushed
1182 and pushop.cgresult is None # nothing was pushed
1175 and remotephases.get('publishing', False)):
1183 and remotephases.get('publishing', False)):
1176 # When:
1184 # When:
1177 # - this is a subrepo push
1185 # - this is a subrepo push
1178 # - and remote support phase
1186 # - and remote support phase
1179 # - and no changeset was pushed
1187 # - and no changeset was pushed
1180 # - and remote is publishing
1188 # - and remote is publishing
1181 # We may be in issue 3871 case!
1189 # We may be in issue 3871 case!
1182 # We drop the phase synchronisation that would normally be done as
1190 # We drop the phase synchronisation that would normally be done as
1183 # a courtesy, and instead publish changesets that are possibly still
1191 # a courtesy, and instead publish changesets that are possibly still
1184 # draft locally, since the publishing remote treats them as public.
1192 # draft locally, since the publishing remote treats them as public.
1185 remotephases = {'publishing': 'True'}
1193 remotephases = {'publishing': 'True'}
1186 if not remotephases: # old server or public only reply from non-publishing
1194 if not remotephases: # old server or public only reply from non-publishing
1187 _localphasemove(pushop, cheads)
1195 _localphasemove(pushop, cheads)
1188 # don't push any phase data as there is nothing to push
1196 # don't push any phase data as there is nothing to push
1189 else:
1197 else:
1190 ana = phases.analyzeremotephases(pushop.repo, cheads,
1198 ana = phases.analyzeremotephases(pushop.repo, cheads,
1191 remotephases)
1199 remotephases)
1192 pheads, droots = ana
1200 pheads, droots = ana
1193 ### Apply remote phase on local
1201 ### Apply remote phase on local
1194 if remotephases.get('publishing', False):
1202 if remotephases.get('publishing', False):
1195 _localphasemove(pushop, cheads)
1203 _localphasemove(pushop, cheads)
1196 else: # publish = False
1204 else: # publish = False
1197 _localphasemove(pushop, pheads)
1205 _localphasemove(pushop, pheads)
1198 _localphasemove(pushop, cheads, phases.draft)
1206 _localphasemove(pushop, cheads, phases.draft)
1199 ### Apply local phase on remote
1207 ### Apply local phase on remote
1200
1208
1201 if pushop.cgresult:
1209 if pushop.cgresult:
1202 if 'phases' in pushop.stepsdone:
1210 if 'phases' in pushop.stepsdone:
1203 # phases already pushed through bundle2
1211 # phases already pushed through bundle2
1204 return
1212 return
1205 outdated = pushop.outdatedphases
1213 outdated = pushop.outdatedphases
1206 else:
1214 else:
1207 outdated = pushop.fallbackoutdatedphases
1215 outdated = pushop.fallbackoutdatedphases
1208
1216
1209 pushop.stepsdone.add('phases')
1217 pushop.stepsdone.add('phases')
1210
1218
1211 # filter heads already turned public by the push
1219 # filter heads already turned public by the push
1212 outdated = [c for c in outdated if c.node() not in pheads]
1220 outdated = [c for c in outdated if c.node() not in pheads]
1213 # fallback to independent pushkey command
1221 # fallback to independent pushkey command
1214 for newremotehead in outdated:
1222 for newremotehead in outdated:
1215 with pushop.remote.commandexecutor() as e:
1223 with pushop.remote.commandexecutor() as e:
1216 r = e.callcommand('pushkey', {
1224 r = e.callcommand('pushkey', {
1217 'namespace': 'phases',
1225 'namespace': 'phases',
1218 'key': newremotehead.hex(),
1226 'key': newremotehead.hex(),
1219 'old': '%d' % phases.draft,
1227 'old': '%d' % phases.draft,
1220 'new': '%d' % phases.public
1228 'new': '%d' % phases.public
1221 }).result()
1229 }).result()
1222
1230
1223 if not r:
1231 if not r:
1224 pushop.ui.warn(_('updating %s to public failed!\n')
1232 pushop.ui.warn(_('updating %s to public failed!\n')
1225 % newremotehead)
1233 % newremotehead)
1226
1234
1227 def _localphasemove(pushop, nodes, phase=phases.public):
1235 def _localphasemove(pushop, nodes, phase=phases.public):
1228 """move <nodes> to <phase> in the local source repo"""
1236 """move <nodes> to <phase> in the local source repo"""
1229 if pushop.trmanager:
1237 if pushop.trmanager:
1230 phases.advanceboundary(pushop.repo,
1238 phases.advanceboundary(pushop.repo,
1231 pushop.trmanager.transaction(),
1239 pushop.trmanager.transaction(),
1232 phase,
1240 phase,
1233 nodes)
1241 nodes)
1234 else:
1242 else:
1235 # repo is not locked, do not change any phases!
1243 # repo is not locked, do not change any phases!
1236 # Informs the user that phases should have been moved when
1244 # Informs the user that phases should have been moved when
1237 # applicable.
1245 # applicable.
1238 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1246 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1239 phasestr = phases.phasenames[phase]
1247 phasestr = phases.phasenames[phase]
1240 if actualmoves:
1248 if actualmoves:
1241 pushop.ui.status(_('cannot lock source repo, skipping '
1249 pushop.ui.status(_('cannot lock source repo, skipping '
1242 'local %s phase update\n') % phasestr)
1250 'local %s phase update\n') % phasestr)
1243
1251
1244 def _pushobsolete(pushop):
1252 def _pushobsolete(pushop):
1245 """utility function to push obsolete markers to a remote"""
1253 """utility function to push obsolete markers to a remote"""
1246 if 'obsmarkers' in pushop.stepsdone:
1254 if 'obsmarkers' in pushop.stepsdone:
1247 return
1255 return
1248 repo = pushop.repo
1256 repo = pushop.repo
1249 remote = pushop.remote
1257 remote = pushop.remote
1250 pushop.stepsdone.add('obsmarkers')
1258 pushop.stepsdone.add('obsmarkers')
1251 if pushop.outobsmarkers:
1259 if pushop.outobsmarkers:
1252 pushop.ui.debug('try to push obsolete markers to remote\n')
1260 pushop.ui.debug('try to push obsolete markers to remote\n')
1253 rslts = []
1261 rslts = []
1254 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1262 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1255 for key in sorted(remotedata, reverse=True):
1263 for key in sorted(remotedata, reverse=True):
1256 # reverse sort to ensure we end with dump0
1264 # reverse sort to ensure we end with dump0
1257 data = remotedata[key]
1265 data = remotedata[key]
1258 rslts.append(remote.pushkey('obsolete', key, '', data))
1266 rslts.append(remote.pushkey('obsolete', key, '', data))
1259 if [r for r in rslts if not r]:
1267 if [r for r in rslts if not r]:
1260 msg = _('failed to push some obsolete markers!\n')
1268 msg = _('failed to push some obsolete markers!\n')
1261 repo.ui.warn(msg)
1269 repo.ui.warn(msg)
1262
1270
1263 def _pushbookmark(pushop):
1271 def _pushbookmark(pushop):
1264 """Update bookmark position on remote"""
1272 """Update bookmark position on remote"""
1265 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1273 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1266 return
1274 return
1267 pushop.stepsdone.add('bookmarks')
1275 pushop.stepsdone.add('bookmarks')
1268 ui = pushop.ui
1276 ui = pushop.ui
1269 remote = pushop.remote
1277 remote = pushop.remote
1270
1278
1271 for b, old, new in pushop.outbookmarks:
1279 for b, old, new in pushop.outbookmarks:
1272 action = 'update'
1280 action = 'update'
1273 if not old:
1281 if not old:
1274 action = 'export'
1282 action = 'export'
1275 elif not new:
1283 elif not new:
1276 action = 'delete'
1284 action = 'delete'
1277
1285
1278 with remote.commandexecutor() as e:
1286 with remote.commandexecutor() as e:
1279 r = e.callcommand('pushkey', {
1287 r = e.callcommand('pushkey', {
1280 'namespace': 'bookmarks',
1288 'namespace': 'bookmarks',
1281 'key': b,
1289 'key': b,
1282 'old': old,
1290 'old': old,
1283 'new': new,
1291 'new': new,
1284 }).result()
1292 }).result()
1285
1293
1286 if r:
1294 if r:
1287 ui.status(bookmsgmap[action][0] % b)
1295 ui.status(bookmsgmap[action][0] % b)
1288 else:
1296 else:
1289 ui.warn(bookmsgmap[action][1] % b)
1297 ui.warn(bookmsgmap[action][1] % b)
1290 # discovery may have set the value from an invalid entry
1298 # discovery may have set the value from an invalid entry
1291 if pushop.bkresult is not None:
1299 if pushop.bkresult is not None:
1292 pushop.bkresult = 1
1300 pushop.bkresult = 1
1293
1301
1294 class pulloperation(object):
1302 class pulloperation(object):
1295 """A object that represent a single pull operation
1303 """A object that represent a single pull operation
1296
1304
1297 Its purpose is to carry pull-related state and very common operations.
1305 Its purpose is to carry pull-related state and very common operations.
1298
1306
1299 A new one should be created at the beginning of each pull and discarded
1307 A new one should be created at the beginning of each pull and discarded
1300 afterward.
1308 afterward.
1301 """
1309 """
1302
1310
1303 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1311 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1304 remotebookmarks=None, streamclonerequested=None):
1312 remotebookmarks=None, streamclonerequested=None):
1305 # repo we pull into
1313 # repo we pull into
1306 self.repo = repo
1314 self.repo = repo
1307 # repo we pull from
1315 # repo we pull from
1308 self.remote = remote
1316 self.remote = remote
1309 # revisions we try to pull (None is "all")
1317 # revisions we try to pull (None is "all")
1310 self.heads = heads
1318 self.heads = heads
1311 # bookmarks pulled explicitly
1319 # bookmarks pulled explicitly
1312 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1320 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1313 for bookmark in bookmarks]
1321 for bookmark in bookmarks]
1314 # do we force pull?
1322 # do we force pull?
1315 self.force = force
1323 self.force = force
1316 # whether a streaming clone was requested
1324 # whether a streaming clone was requested
1317 self.streamclonerequested = streamclonerequested
1325 self.streamclonerequested = streamclonerequested
1318 # transaction manager
1326 # transaction manager
1319 self.trmanager = None
1327 self.trmanager = None
1320 # set of common changesets between local and remote before pull
1328 # set of common changesets between local and remote before pull
1321 self.common = None
1329 self.common = None
1322 # set of pulled heads
1330 # set of pulled heads
1323 self.rheads = None
1331 self.rheads = None
1324 # list of missing changesets to fetch remotely
1332 # list of missing changesets to fetch remotely
1325 self.fetch = None
1333 self.fetch = None
1326 # remote bookmarks data
1334 # remote bookmarks data
1327 self.remotebookmarks = remotebookmarks
1335 self.remotebookmarks = remotebookmarks
1328 # result of changegroup pulling (used as return code by pull)
1336 # result of changegroup pulling (used as return code by pull)
1329 self.cgresult = None
1337 self.cgresult = None
1330 # list of steps already done
1338 # list of steps already done
1331 self.stepsdone = set()
1339 self.stepsdone = set()
1332 # Whether we attempted a clone from pre-generated bundles.
1340 # Whether we attempted a clone from pre-generated bundles.
1333 self.clonebundleattempted = False
1341 self.clonebundleattempted = False
1334
1342
1335 @util.propertycache
1343 @util.propertycache
1336 def pulledsubset(self):
1344 def pulledsubset(self):
1337 """heads of the set of changeset target by the pull"""
1345 """heads of the set of changeset target by the pull"""
1338 # compute target subset
1346 # compute target subset
1339 if self.heads is None:
1347 if self.heads is None:
1340 # We pulled everything possible
1348 # We pulled everything possible
1341 # sync on everything common
1349 # sync on everything common
1342 c = set(self.common)
1350 c = set(self.common)
1343 ret = list(self.common)
1351 ret = list(self.common)
1344 for n in self.rheads:
1352 for n in self.rheads:
1345 if n not in c:
1353 if n not in c:
1346 ret.append(n)
1354 ret.append(n)
1347 return ret
1355 return ret
1348 else:
1356 else:
1349 # We pulled a specific subset
1357 # We pulled a specific subset
1350 # sync on this subset
1358 # sync on this subset
1351 return self.heads
1359 return self.heads
1352
1360
1353 @util.propertycache
1361 @util.propertycache
1354 def canusebundle2(self):
1362 def canusebundle2(self):
1355 return not _forcebundle1(self)
1363 return not _forcebundle1(self)
1356
1364
1357 @util.propertycache
1365 @util.propertycache
1358 def remotebundle2caps(self):
1366 def remotebundle2caps(self):
1359 return bundle2.bundle2caps(self.remote)
1367 return bundle2.bundle2caps(self.remote)
1360
1368
1361 def gettransaction(self):
1369 def gettransaction(self):
1362 # deprecated; talk to trmanager directly
1370 # deprecated; talk to trmanager directly
1363 return self.trmanager.transaction()
1371 return self.trmanager.transaction()
1364
1372
1365 class transactionmanager(util.transactional):
1373 class transactionmanager(util.transactional):
1366 """An object to manage the life cycle of a transaction
1374 """An object to manage the life cycle of a transaction
1367
1375
1368 It creates the transaction on demand and calls the appropriate hooks when
1376 It creates the transaction on demand and calls the appropriate hooks when
1369 closing the transaction."""
1377 closing the transaction."""
1370 def __init__(self, repo, source, url):
1378 def __init__(self, repo, source, url):
1371 self.repo = repo
1379 self.repo = repo
1372 self.source = source
1380 self.source = source
1373 self.url = url
1381 self.url = url
1374 self._tr = None
1382 self._tr = None
1375
1383
1376 def transaction(self):
1384 def transaction(self):
1377 """Return an open transaction object, constructing if necessary"""
1385 """Return an open transaction object, constructing if necessary"""
1378 if not self._tr:
1386 if not self._tr:
1379 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1387 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1380 self._tr = self.repo.transaction(trname)
1388 self._tr = self.repo.transaction(trname)
1381 self._tr.hookargs['source'] = self.source
1389 self._tr.hookargs['source'] = self.source
1382 self._tr.hookargs['url'] = self.url
1390 self._tr.hookargs['url'] = self.url
1383 return self._tr
1391 return self._tr
1384
1392
1385 def close(self):
1393 def close(self):
1386 """close transaction if created"""
1394 """close transaction if created"""
1387 if self._tr is not None:
1395 if self._tr is not None:
1388 self._tr.close()
1396 self._tr.close()
1389
1397
1390 def release(self):
1398 def release(self):
1391 """release transaction if created"""
1399 """release transaction if created"""
1392 if self._tr is not None:
1400 if self._tr is not None:
1393 self._tr.release()
1401 self._tr.release()
1394
1402
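# Lifecycle sketch: util.transactional supplies __enter__/__exit__, so a
# caller can write
#     with transactionmanager(repo, 'pull', remote.url()) as tm:
#         tr = tm.transaction()  # opened lazily on first use
#         ...
# and the transaction is closed on success or released on failure.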
1403 def listkeys(remote, namespace):
1404 with remote.commandexecutor() as e:
1405 return e.callcommand('listkeys', {'namespace': namespace}).result()
1406
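# Illustration (not part of the change): the executor interface also lets
# callers batch several commands before resolving their futures, e.g.
#     with remote.commandexecutor() as e:
#         fphases = e.callcommand('listkeys', {'namespace': 'phases'})
#         fbooks = e.callcommand('listkeys', {'namespace': 'bookmarks'})
#     phases, books = fphases.result(), fbooks.result()
# Whether the two calls share a round trip is up to the peer implementation.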
1395 def _fullpullbundle2(repo, pullop):
1407 def _fullpullbundle2(repo, pullop):
1396 # The server may send a partial reply, i.e. when inlining
1408 # The server may send a partial reply, i.e. when inlining
1397 # pre-computed bundles. In that case, update the common
1409 # pre-computed bundles. In that case, update the common
1398 # set based on the results and pull another bundle.
1410 # set based on the results and pull another bundle.
1399 #
1411 #
1400 # There are two indicators that the process is finished:
1412 # There are two indicators that the process is finished:
1401 # - no changeset has been added, or
1413 # - no changeset has been added, or
1402 # - all remote heads are known locally.
1414 # - all remote heads are known locally.
1403 # The head check must use the unfiltered view as obsoletion
1415 # The head check must use the unfiltered view as obsoletion
1404 # markers can hide heads.
1416 # markers can hide heads.
1405 unfi = repo.unfiltered()
1417 unfi = repo.unfiltered()
1406 unficl = unfi.changelog
1418 unficl = unfi.changelog
1407 def headsofdiff(h1, h2):
1419 def headsofdiff(h1, h2):
1408 """Returns heads(h1 % h2)"""
1420 """Returns heads(h1 % h2)"""
1409 res = unfi.set('heads(%ln %% %ln)', h1, h2)
1421 res = unfi.set('heads(%ln %% %ln)', h1, h2)
1410 return set(ctx.node() for ctx in res)
1422 return set(ctx.node() for ctx in res)
1411 def headsofunion(h1, h2):
1423 def headsofunion(h1, h2):
1412 """Returns heads((h1 + h2) - null)"""
1424 """Returns heads((h1 + h2) - null)"""
1413 res = unfi.set('heads((%ln + %ln - null))', h1, h2)
1425 res = unfi.set('heads((%ln + %ln - null))', h1, h2)
1414 return set(ctx.node() for ctx in res)
1426 return set(ctx.node() for ctx in res)
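# (%ln substitutes a list of binary nodes into the revset, and '%%' is a
# literal '%', so 'heads(%ln %% %ln)' above evaluates heads(only(h1, h2)).)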
1415 while True:
1427 while True:
1416 old_heads = unficl.heads()
1428 old_heads = unficl.heads()
1417 clstart = len(unficl)
1429 clstart = len(unficl)
1418 _pullbundle2(pullop)
1430 _pullbundle2(pullop)
1419 if changegroup.NARROW_REQUIREMENT in repo.requirements:
1431 if changegroup.NARROW_REQUIREMENT in repo.requirements:
1420 # XXX narrow clones filter the heads on the server side during
1432 # XXX narrow clones filter the heads on the server side during
1421 # XXX getbundle and result in partial replies as well.
1433 # XXX getbundle and result in partial replies as well.
1422 # XXX Disable pull bundles in this case as band aid to avoid
1434 # XXX Disable pull bundles in this case as band aid to avoid
1423 # XXX extra round trips.
1435 # XXX extra round trips.
1424 break
1436 break
1425 if clstart == len(unficl):
1437 if clstart == len(unficl):
1426 break
1438 break
1427 if all(unficl.hasnode(n) for n in pullop.rheads):
1439 if all(unficl.hasnode(n) for n in pullop.rheads):
1428 break
1440 break
1429 new_heads = headsofdiff(unficl.heads(), old_heads)
1441 new_heads = headsofdiff(unficl.heads(), old_heads)
1430 pullop.common = headsofunion(new_heads, pullop.common)
1442 pullop.common = headsofunion(new_heads, pullop.common)
1431 pullop.rheads = set(pullop.rheads) - pullop.common
1443 pullop.rheads = set(pullop.rheads) - pullop.common
1432
1444
1433 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1445 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1434 streamclonerequested=None):
1446 streamclonerequested=None):
1435 """Fetch repository data from a remote.
1447 """Fetch repository data from a remote.
1436
1448
1437 This is the main function used to retrieve data from a remote repository.
1449 This is the main function used to retrieve data from a remote repository.
1438
1450
1439 ``repo`` is the local repository to clone into.
1451 ``repo`` is the local repository to clone into.
1440 ``remote`` is a peer instance.
1452 ``remote`` is a peer instance.
1441 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1453 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1442 default) means to pull everything from the remote.
1454 default) means to pull everything from the remote.
1443 ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
1455 ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
1444 default, all remote bookmarks are pulled.
1456 default, all remote bookmarks are pulled.
1445 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1457 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1446 initialization.
1458 initialization.
1447 ``streamclonerequested`` is a boolean indicating whether a "streaming
1459 ``streamclonerequested`` is a boolean indicating whether a "streaming
1448 clone" is requested. A "streaming clone" is essentially a raw file copy
1460 clone" is requested. A "streaming clone" is essentially a raw file copy
1449 of revlogs from the server. This only works when the local repository is
1461 of revlogs from the server. This only works when the local repository is
1450 empty. The default value of ``None`` means to respect the server
1462 empty. The default value of ``None`` means to respect the server
1451 configuration for preferring stream clones.
1463 configuration for preferring stream clones.
1452
1464
1453 Returns the ``pulloperation`` created for this pull.
1465 Returns the ``pulloperation`` created for this pull.
1454 """
1466 """
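# Minimal usage sketch: pullop = exchange.pull(repo, remote) fetches
# everything from the peer; pullop.cgresult afterwards holds the
# changegroup result used as the pull's return code.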
1455 if opargs is None:
1467 if opargs is None:
1456 opargs = {}
1468 opargs = {}
1457 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1469 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1458 streamclonerequested=streamclonerequested,
1470 streamclonerequested=streamclonerequested,
1459 **pycompat.strkwargs(opargs))
1471 **pycompat.strkwargs(opargs))
1460
1472
1461 peerlocal = pullop.remote.local()
1473 peerlocal = pullop.remote.local()
1462 if peerlocal:
1474 if peerlocal:
1463 missing = set(peerlocal.requirements) - pullop.repo.supported
1475 missing = set(peerlocal.requirements) - pullop.repo.supported
1464 if missing:
1476 if missing:
1465 msg = _("required features are not"
1477 msg = _("required features are not"
1466 " supported in the destination:"
1478 " supported in the destination:"
1467 " %s") % (', '.join(sorted(missing)))
1479 " %s") % (', '.join(sorted(missing)))
1468 raise error.Abort(msg)
1480 raise error.Abort(msg)
1469
1481
1470 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1482 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1471 with repo.wlock(), repo.lock(), pullop.trmanager:
1483 with repo.wlock(), repo.lock(), pullop.trmanager:
1472 # This should ideally be in _pullbundle2(). However, it needs to run
1484 # This should ideally be in _pullbundle2(). However, it needs to run
1473 # before discovery to avoid extra work.
1485 # before discovery to avoid extra work.
1474 _maybeapplyclonebundle(pullop)
1486 _maybeapplyclonebundle(pullop)
1475 streamclone.maybeperformlegacystreamclone(pullop)
1487 streamclone.maybeperformlegacystreamclone(pullop)
1476 _pulldiscovery(pullop)
1488 _pulldiscovery(pullop)
1477 if pullop.canusebundle2:
1489 if pullop.canusebundle2:
1478 _fullpullbundle2(repo, pullop)
1490 _fullpullbundle2(repo, pullop)
1479 _pullchangeset(pullop)
1491 _pullchangeset(pullop)
1480 _pullphase(pullop)
1492 _pullphase(pullop)
1481 _pullbookmarks(pullop)
1493 _pullbookmarks(pullop)
1482 _pullobsolete(pullop)
1494 _pullobsolete(pullop)
1483
1495
1484 # storing remotenames
1496 # storing remotenames
1485 if repo.ui.configbool('experimental', 'remotenames'):
1497 if repo.ui.configbool('experimental', 'remotenames'):
1486 logexchange.pullremotenames(repo, remote)
1498 logexchange.pullremotenames(repo, remote)
1487
1499
1488 return pullop
1500 return pullop
1489
1501
1490 # list of steps to perform discovery before pull
1502 # list of steps to perform discovery before pull
1491 pulldiscoveryorder = []
1503 pulldiscoveryorder = []
1492
1504
1493 # Mapping between step name and function
1505 # Mapping between step name and function
1494 #
1506 #
1495 # This exists to help extensions wrap steps if necessary
1507 # This exists to help extensions wrap steps if necessary
1496 pulldiscoverymapping = {}
1508 pulldiscoverymapping = {}
1497
1509
1498 def pulldiscovery(stepname):
1510 def pulldiscovery(stepname):
1499 """decorator for function performing discovery before pull
1511 """decorator for function performing discovery before pull
1500
1512
1501 The function is added to the step -> function mapping and appended to the
1513 The function is added to the step -> function mapping and appended to the
1502 list of steps. Beware that decorated functions will be added in order (this
1514 list of steps. Beware that decorated functions will be added in order (this
1503 may matter).
1515 may matter).
1504
1516
1505 You can only use this decorator for a new step; if you want to wrap a step
1517 You can only use this decorator for a new step; if you want to wrap a step
1506 from an extension, change the pulldiscoverymapping dictionary directly."""
1518 from an extension, change the pulldiscoverymapping dictionary directly."""
1507 def dec(func):
1519 def dec(func):
1508 assert stepname not in pulldiscoverymapping
1520 assert stepname not in pulldiscoverymapping
1509 pulldiscoverymapping[stepname] = func
1521 pulldiscoverymapping[stepname] = func
1510 pulldiscoveryorder.append(stepname)
1522 pulldiscoveryorder.append(stepname)
1511 return func
1523 return func
1512 return dec
1524 return dec
1513
1525
1514 def _pulldiscovery(pullop):
1526 def _pulldiscovery(pullop):
1515 """Run all discovery steps"""
1527 """Run all discovery steps"""
1516 for stepname in pulldiscoveryorder:
1528 for stepname in pulldiscoveryorder:
1517 step = pulldiscoverymapping[stepname]
1529 step = pulldiscoverymapping[stepname]
1518 step(pullop)
1530 step(pullop)
1519
1531
1520 @pulldiscovery('b1:bookmarks')
1532 @pulldiscovery('b1:bookmarks')
1521 def _pullbookmarkbundle1(pullop):
1533 def _pullbookmarkbundle1(pullop):
1522 """fetch bookmark data in bundle1 case
1534 """fetch bookmark data in bundle1 case
1523
1535
1524 If not using bundle2, we have to fetch bookmarks before changeset
1536 If not using bundle2, we have to fetch bookmarks before changeset
1525 discovery to reduce the chance and impact of race conditions."""
1537 discovery to reduce the chance and impact of race conditions."""
1526 if pullop.remotebookmarks is not None:
1538 if pullop.remotebookmarks is not None:
1527 return
1539 return
1528 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1540 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1529 # all known bundle2 servers now support listkeys, but let's be nice with
1541 # all known bundle2 servers now support listkeys, but let's be nice with
1530 # new implementations.
1542 # new implementations.
1531 return
1543 return
1532 books = pullop.remote.listkeys('bookmarks')
1544 books = listkeys(pullop.remote, 'bookmarks')
1533 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1545 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1534
1546
1535
1547
1536 @pulldiscovery('changegroup')
1548 @pulldiscovery('changegroup')
1537 def _pulldiscoverychangegroup(pullop):
1549 def _pulldiscoverychangegroup(pullop):
1538 """discovery phase for the pull
1550 """discovery phase for the pull
1539
1551
1540 Currently handles changeset discovery only; will change to handle all discovery
1552 Currently handles changeset discovery only; will change to handle all discovery
1541 at some point."""
1553 at some point."""
1542 tmp = discovery.findcommonincoming(pullop.repo,
1554 tmp = discovery.findcommonincoming(pullop.repo,
1543 pullop.remote,
1555 pullop.remote,
1544 heads=pullop.heads,
1556 heads=pullop.heads,
1545 force=pullop.force)
1557 force=pullop.force)
1546 common, fetch, rheads = tmp
1558 common, fetch, rheads = tmp
1547 nm = pullop.repo.unfiltered().changelog.nodemap
1559 nm = pullop.repo.unfiltered().changelog.nodemap
1548 if fetch and rheads:
1560 if fetch and rheads:
1549 # If a remote head is filtered locally, put it back in common.
1561 # If a remote head is filtered locally, put it back in common.
1550 #
1562 #
1551 # This is a hackish solution to catch most of "common but locally
1563 # This is a hackish solution to catch most of "common but locally
1552 # hidden situation". We do not performs discovery on unfiltered
1564 # hidden situation". We do not performs discovery on unfiltered
1553 # repository because it end up doing a pathological amount of round
1565 # repository because it end up doing a pathological amount of round
1554 # trip for w huge amount of changeset we do not care about.
1566 # trip for w huge amount of changeset we do not care about.
1555 #
1567 #
1556 # If a set of such "common but filtered" changesets exists on the server
1568 # If a set of such "common but filtered" changesets exists on the server
1557 # but does not include a remote head, we'll not be able to detect it,
1569 # but does not include a remote head, we'll not be able to detect it,
1558 scommon = set(common)
1570 scommon = set(common)
1559 for n in rheads:
1571 for n in rheads:
1560 if n in nm:
1572 if n in nm:
1561 if n not in scommon:
1573 if n not in scommon:
1562 common.append(n)
1574 common.append(n)
1563 if set(rheads).issubset(set(common)):
1575 if set(rheads).issubset(set(common)):
1564 fetch = []
1576 fetch = []
1565 pullop.common = common
1577 pullop.common = common
1566 pullop.fetch = fetch
1578 pullop.fetch = fetch
1567 pullop.rheads = rheads
1579 pullop.rheads = rheads
1568
1580
1569 def _pullbundle2(pullop):
1581 def _pullbundle2(pullop):
1570 """pull data using bundle2
1582 """pull data using bundle2
1571
1583
1572 For now, the only supported data are changegroup."""
1584 For now, the only supported data are changegroup."""
1573 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1585 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1574
1586
1575 # make ui easier to access
1587 # make ui easier to access
1576 ui = pullop.repo.ui
1588 ui = pullop.repo.ui
1577
1589
1578 # At the moment we don't do stream clones over bundle2. If that is
1590 # At the moment we don't do stream clones over bundle2. If that is
1579 # implemented then here's where the check for that will go.
1591 # implemented then here's where the check for that will go.
1580 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1592 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1581
1593
1582 # declare pull perimeters
1594 # declare pull perimeters
1583 kwargs['common'] = pullop.common
1595 kwargs['common'] = pullop.common
1584 kwargs['heads'] = pullop.heads or pullop.rheads
1596 kwargs['heads'] = pullop.heads or pullop.rheads
1585
1597
1586 if streaming:
1598 if streaming:
1587 kwargs['cg'] = False
1599 kwargs['cg'] = False
1588 kwargs['stream'] = True
1600 kwargs['stream'] = True
1589 pullop.stepsdone.add('changegroup')
1601 pullop.stepsdone.add('changegroup')
1590 pullop.stepsdone.add('phases')
1602 pullop.stepsdone.add('phases')
1591
1603
1592 else:
1604 else:
1593 # pulling changegroup
1605 # pulling changegroup
1594 pullop.stepsdone.add('changegroup')
1606 pullop.stepsdone.add('changegroup')
1595
1607
1596 kwargs['cg'] = pullop.fetch
1608 kwargs['cg'] = pullop.fetch
1597
1609
1598 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1610 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1599 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1611 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1600 if (not legacyphase and hasbinaryphase):
1612 if (not legacyphase and hasbinaryphase):
1601 kwargs['phases'] = True
1613 kwargs['phases'] = True
1602 pullop.stepsdone.add('phases')
1614 pullop.stepsdone.add('phases')
1603
1615
1604 if 'listkeys' in pullop.remotebundle2caps:
1616 if 'listkeys' in pullop.remotebundle2caps:
1605 if 'phases' not in pullop.stepsdone:
1617 if 'phases' not in pullop.stepsdone:
1606 kwargs['listkeys'] = ['phases']
1618 kwargs['listkeys'] = ['phases']
1607
1619
1608 bookmarksrequested = False
1620 bookmarksrequested = False
1609 legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
1621 legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
1610 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1622 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1611
1623
1612 if pullop.remotebookmarks is not None:
1624 if pullop.remotebookmarks is not None:
1613 pullop.stepsdone.add('request-bookmarks')
1625 pullop.stepsdone.add('request-bookmarks')
1614
1626
1615 if ('request-bookmarks' not in pullop.stepsdone
1627 if ('request-bookmarks' not in pullop.stepsdone
1616 and pullop.remotebookmarks is None
1628 and pullop.remotebookmarks is None
1617 and not legacybookmark and hasbinarybook):
1629 and not legacybookmark and hasbinarybook):
1618 kwargs['bookmarks'] = True
1630 kwargs['bookmarks'] = True
1619 bookmarksrequested = True
1631 bookmarksrequested = True
1620
1632
1621 if 'listkeys' in pullop.remotebundle2caps:
1633 if 'listkeys' in pullop.remotebundle2caps:
1622 if 'request-bookmarks' not in pullop.stepsdone:
1634 if 'request-bookmarks' not in pullop.stepsdone:
1623 # make sure to always include bookmark data when migrating
1635 # make sure to always include bookmark data when migrating
1624 # `hg incoming --bundle` to using this function.
1636 # `hg incoming --bundle` to using this function.
1625 pullop.stepsdone.add('request-bookmarks')
1637 pullop.stepsdone.add('request-bookmarks')
1626 kwargs.setdefault('listkeys', []).append('bookmarks')
1638 kwargs.setdefault('listkeys', []).append('bookmarks')
1627
1639
1628 # If this is a full pull / clone and the server supports the clone bundles
1640 # If this is a full pull / clone and the server supports the clone bundles
1629 # feature, tell the server whether we attempted a clone bundle. The
1641 # feature, tell the server whether we attempted a clone bundle. The
1630 # presence of this flag indicates the client supports clone bundles. This
1642 # presence of this flag indicates the client supports clone bundles. This
1631 # will enable the server to treat clients that support clone bundles
1643 # will enable the server to treat clients that support clone bundles
1632 # differently from those that don't.
1644 # differently from those that don't.
1633 if (pullop.remote.capable('clonebundles')
1645 if (pullop.remote.capable('clonebundles')
1634 and pullop.heads is None and list(pullop.common) == [nullid]):
1646 and pullop.heads is None and list(pullop.common) == [nullid]):
1635 kwargs['cbattempted'] = pullop.clonebundleattempted
1647 kwargs['cbattempted'] = pullop.clonebundleattempted
1636
1648
1637 if streaming:
1649 if streaming:
1638 pullop.repo.ui.status(_('streaming all changes\n'))
1650 pullop.repo.ui.status(_('streaming all changes\n'))
1639 elif not pullop.fetch:
1651 elif not pullop.fetch:
1640 pullop.repo.ui.status(_("no changes found\n"))
1652 pullop.repo.ui.status(_("no changes found\n"))
1641 pullop.cgresult = 0
1653 pullop.cgresult = 0
1642 else:
1654 else:
1643 if pullop.heads is None and list(pullop.common) == [nullid]:
1655 if pullop.heads is None and list(pullop.common) == [nullid]:
1644 pullop.repo.ui.status(_("requesting all changes\n"))
1656 pullop.repo.ui.status(_("requesting all changes\n"))
1645 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1657 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1646 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1658 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1647 if obsolete.commonversion(remoteversions) is not None:
1659 if obsolete.commonversion(remoteversions) is not None:
1648 kwargs['obsmarkers'] = True
1660 kwargs['obsmarkers'] = True
1649 pullop.stepsdone.add('obsmarkers')
1661 pullop.stepsdone.add('obsmarkers')
1650 _pullbundle2extraprepare(pullop, kwargs)
1662 _pullbundle2extraprepare(pullop, kwargs)
1651
1663
1652 with pullop.remote.commandexecutor() as e:
1664 with pullop.remote.commandexecutor() as e:
1653 args = dict(kwargs)
1665 args = dict(kwargs)
1654 args['source'] = 'pull'
1666 args['source'] = 'pull'
1655 bundle = e.callcommand('getbundle', args).result()
1667 bundle = e.callcommand('getbundle', args).result()
1656
1668
    try:
        op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction,
                                     source='pull')
        op.modes['bookmarks'] = 'records'
        bundle2.processbundle(pullop.repo, bundle, op=op)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        books = {}
        for record in op.records['bookmarks']:
            books[record['bookmark']] = record["node"]
        pullop.remotebookmarks = books
    else:
        for namespace, value in op.records['listkeys']:
            if namespace == 'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)

def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call"""

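# A minimal sketch of how an extension might use the hook above (hypothetical
# extension code, for illustration only; `_extraprepare` and the extra
# listkeys namespace are assumptions, while `extensions.wrapfunction` and
# `extsetup` are the standard extension mechanisms):
#
#   from mercurial import exchange, extensions
#
#   def _extraprepare(orig, pullop, kwargs):
#       # request an extra listkeys namespace on every pull
#       kwargs.setdefault('listkeys', []).append('namespaces')
#       return orig(pullop, kwargs)
#
#   def extsetup(ui):
#       extensions.wrapfunction(exchange, '_pullbundle2extraprepare',
#                               _extraprepare)
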
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay opening the transaction as long as possible so we don't open
    # a transaction for nothing, which would break future useful rollback
    # calls.
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand('changegroup', {
                'nodes': pullop.fetch,
                'source': 'pull',
            }).result()

    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand('changegroupsubset', {
                'bases': pullop.fetch,
                'heads': pullop.heads,
                'source': 'pull',
            }).result()

    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)

def _pullphase(pullop):
    # Get remote phases data from remote
    if 'phases' in pullop.stepsdone:
        return
-    remotephases = pullop.remote.listkeys('phases')
+    remotephases = listkeys(pullop.remote, 'phases')
    _pullapplyphases(pullop, remotephases)

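# The `listkeys` helper called above is defined earlier in this module,
# outside this hunk. A minimal sketch of what it is assumed to do -- routing
# the legacy listkeys wire command through the command executor interface
# this change is about:
#
#   def listkeys(remote, namespace):
#       with remote.commandexecutor() as e:
#           return e.callcommand('listkeys', {'namespace': namespace}).result()
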
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)

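# For reference, `remotephases` above is a plain listkeys dict. Illustrative
# shapes only (placeholder hex node, not captured output):
#
#   {'publishing': 'True'}        # remote advertises itself as publishing
#   {'<hexnode>': '1'}            # non-publishing remote with a draft root
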
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    remotebookmarks = pullop.remotebookmarks
    bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)

def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    `gettransaction` is a function that returns the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code
    that a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes."""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
-        remoteobs = pullop.remote.listkeys('obsolete')
+        remoteobs = listkeys(pullop.remote, 'obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
        pullop.repo.invalidatevolatilesets()
    return tr

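# For reference, the 'obsolete' listkeys namespace consumed above splits the
# marker stream across 'dump0', 'dump1', ... keys, each holding a base85
# encoded chunk of binary obsolescence markers (illustrative shape only):
#
#   {'dump0': '<base85 data>', 'dump1': '<base85 data>'}
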
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    caps = {'HG20'}
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    caps.add('bundle2=' + urlreq.quote(capsblob))
    return caps

# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}

def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, modify the getbundle2partsmapping dictionary directly."""
    def dec(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return dec

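# A minimal sketch of registering a new step with the decorator above
# (hypothetical part name, function, and request flag, for illustration):
#
#   @getbundle2partsgenerator('myext:extradata')
#   def _getbundleextradatapart(bundler, repo, source, bundlecaps=None,
#                               b2caps=None, **kwargs):
#       if kwargs.get(r'extradata', False):
#           bundler.newpart('myext:extradata', data=b'payload')
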
def bundle2requested(bundlecaps):
    if bundlecaps is not None:
        return any(cap.startswith('HG2') for cap in bundlecaps)
    return False

def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        info['bundleversion'] = 1
        return info, changegroup.makestream(repo, outgoing, '01', source,
                                            bundlecaps=bundlecaps)

    # bundle20 case
    info['bundleversion'] = 2
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    info['prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()

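# A minimal sketch of driving this function from a server-side caller
# (hypothetical names: `clientheads`, `clientcommon` and `clientbundlecaps`
# would come from the decoded client request):
#
#   info, chunks = getbundlechunks(repo, 'serve',
#                                  heads=clientheads, common=clientcommon,
#                                  bundlecaps=clientbundlecaps)
#   # `chunks` is an iterator of raw byte chunks; stream it out as-is.
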
@getbundle2partsgenerator('stream2')
def _getbundlestream2(bundler, repo, *args, **kwargs):
    return bundle2.addpartbundlestream2(bundler, repo, **kwargs)

@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cgstream = None
    if kwargs.get(r'cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            cgversions = [v for v in cgversions
                          if v in changegroup.supportedoutgoingversions(repo)]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        if outgoing.missing:
            cgstream = changegroup.makestream(repo, outgoing, version, source,
                                              bundlecaps=bundlecaps)

    if cgstream:
        part = bundler.newpart('changegroup', data=cgstream)
        if cgversions:
            part.addparam('version', version)
        part.addparam('nbchanges', '%d' % len(outgoing.missing),
                      mandatory=False)
        if 'treemanifest' in repo.requirements:
            part.addparam('treemanifest', '1')

@getbundle2partsgenerator('bookmarks')
def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
                           b2caps=None, **kwargs):
    """add a bookmark part to the requested bundle"""
    if not kwargs.get(r'bookmarks', False):
        return
    if 'bookmarks' not in b2caps:
        raise ValueError(_('no common bookmarks exchange method'))
    books = bookmod.listbinbookmarks(repo)
    data = bookmod.binaryencode(books)
    if data:
        bundler.newpart('bookmarks', data=data)

@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    listkeys = kwargs.get(r'listkeys', ())
    for namespace in listkeys:
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)

@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if kwargs.get(r'obsmarkers', False):
        if heads is None:
            heads = repo.heads()
        subset = [c.node() for c in repo.set('::%ln', heads)]
        markers = repo.obsstore.relevantmarkers(subset)
        markers = sorted(markers)
        bundle2.buildobsmarkerspart(bundler, markers)

@getbundle2partsgenerator('phases')
def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, **kwargs):
    """add phase heads part to the requested bundle"""
    if kwargs.get(r'phases', False):
        if 'heads' not in b2caps.get('phases'):
            raise ValueError(_('no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phases for now.
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = 'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        phasemapping = []
        for phase in phases.allphases:
            phasemapping.append(sorted(headsbyphase[phase]))

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart('phase-heads', data=phasedata)

@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Don't send unless:
    # - changesets are being exchanged,
    # - the client supports it.
    if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)

@getbundle2partsgenerator('cache:rev-branch-cache')
def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
                             b2caps=None, heads=None, common=None,
                             **kwargs):
    """Transfer the rev-branch-cache mapping

    The payload is a series of data related to each branch

    1) branch name length
    2) number of open heads
    3) number of closed heads
    4) open heads nodes
    5) closed heads nodes
    """
    # Don't send unless:
    # - changesets are being exchanged,
    # - the client supports it.
    if not (kwargs.get(r'cg', True)) or 'rev-branch-cache' not in b2caps:
        return
    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addpartrevbranchcache(repo, bundler, outgoing)

def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)

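# A minimal sketch of how a caller would build the 'hashed' form of
# `their_heads` accepted above (mirrors the hash computed in check_heads;
# `remoteheads` is a hypothetical list of binary head nodes from discovery):
#
#   heads_hash = hashlib.sha1(''.join(sorted(remoteheads))).digest()
#   their_heads = ['hashed', heads_hash]
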
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call will be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                def gettransaction():
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput,
                                             source='push')
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r

def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""

    repo = pullop.repo
    remote = pullop.remote

    if not repo.ui.configbool('ui', 'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    with remote.commandexecutor() as e:
        res = e.callcommand('clonebundles', {}).result()

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(
        repo, entries, streamclonerequested=pullop.streamclonerequested)

    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))

def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    m = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    bundlespec = parsebundlespec(repo, value,
                                                 externalnames=True)
                    attrs['COMPRESSION'] = bundlespec.compression
                    attrs['VERSION'] = bundlespec.version
                except error.InvalidBundleSpecification:
                    pass
                except error.UnsupportedBundleSpecification:
                    pass

        m.append(attrs)

    return m

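# A minimal sketch of the parser's input and output (hypothetical manifest
# line and URL, for illustration):
#
#   s = 'https://example.com/full.hg BUNDLESPEC=gzip-v2 REQUIRESNI=true'
#   parseclonebundlesmanifest(repo, s)
#   # -> [{'URL': 'https://example.com/full.hg', 'BUNDLESPEC': 'gzip-v2',
#   #      'COMPRESSION': 'gzip', 'VERSION': 'v2', 'REQUIRESNI': 'true'}]
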
def isstreamclonespec(bundlespec):
    # Stream clone v1
    if (bundlespec.compression == 'UN' and bundlespec.version == 's1'):
        return True

    # Stream clone v2
    if (bundlespec.compression == 'UN' and bundlespec.version == '02' and
            bundlespec.contentopts.get('streamv2')):
        return True

    return False

def filterclonebundleentries(repo, entries, streamclonerequested=False):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    newentries = []
    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                bundlespec = parsebundlespec(repo, spec, strict=True)

                # If a stream clone was requested, filter out non-streamclone
                # entries.
                if streamclonerequested and not isstreamclonespec(bundlespec):
                    repo.ui.debug('filtering %s because not a stream clone\n' %
                                  entry['URL'])
                    continue

            except error.InvalidBundleSpecification as e:
                repo.ui.debug(stringutil.forcebytestr(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (
                                  entry['URL'], stringutil.forcebytestr(e)))
                continue
        # If we don't have a spec and requested a stream clone, we don't know
        # what the entry is so don't attempt to apply it.
        elif streamclonerequested:
            repo.ui.debug('filtering %s because cannot determine if a stream '
                          'clone bundle\n' % entry['URL'])
            continue

        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        newentries.append(entry)

    return newentries

class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        for prefkey, prefvalue in self.prefers:
            avalue = self.value.get(prefkey)
            bvalue = other.value.get(prefkey)

            # Special case for b missing attribute and a matches exactly.
            if avalue is not None and bvalue is None and avalue == prefvalue:
                return -1

            # Special case for a missing attribute and b matches exactly.
            if bvalue is not None and avalue is None and bvalue == prefvalue:
                return 1

            # We can't compare unless attribute present on both.
            if avalue is None or bvalue is None:
                continue

            # Same values should fall back to next attribute.
            if avalue == bvalue:
                continue

            # Exact matches come first.
            if avalue == prefvalue:
                return -1
            if bvalue == prefvalue:
                return 1

            # Fall back to next attribute.
            continue

        # If we got here we couldn't sort by attributes and prefers. Fall
        # back to index order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0

def sortclonebundleentries(ui, entries):
    prefers = ui.configlist('ui', 'clonebundleprefers')
    if not prefers:
        return list(entries)

    prefers = [p.split('=', 1) for p in prefers]

    items = sorted(clonebundleentry(v, prefers) for v in entries)
    return [i.value for i in items]

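# A minimal sketch of how the preference sort behaves (hypothetical config
# and manifest, for illustration). With:
#
#   [ui]
#   clonebundleprefers = COMPRESSION=zstd, VERSION=v2
#
# an entry advertising COMPRESSION=zstd sorts ahead of one advertising
# COMPRESSION=gzip; ties fall through to VERSION=v2, then to the original
# manifest order.
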
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL."""
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            fh = urlmod.open(ui, url)
            cg = readbundle(ui, fh, 'stream')

            if isinstance(cg, streamclone.streamcloneapplier):
                cg.apply(repo)
            else:
                bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') %
                    stringutil.forcebytestr(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') %
                    stringutil.forcebytestr(e.reason))

    return False