##// END OF EJS Templates
exchange: use command executor for getbundle...
Gregory Szorc -
r37666:8f3c6fb5 default
parent child Browse files
Show More
@@ -1,2404 +1,2409 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 bin,
16 bin,
17 hex,
17 hex,
18 nullid,
18 nullid,
19 )
19 )
20 from .thirdparty import (
20 from .thirdparty import (
21 attr,
21 attr,
22 )
22 )
23 from . import (
23 from . import (
24 bookmarks as bookmod,
24 bookmarks as bookmod,
25 bundle2,
25 bundle2,
26 changegroup,
26 changegroup,
27 discovery,
27 discovery,
28 error,
28 error,
29 lock as lockmod,
29 lock as lockmod,
30 logexchange,
30 logexchange,
31 obsolete,
31 obsolete,
32 phases,
32 phases,
33 pushkey,
33 pushkey,
34 pycompat,
34 pycompat,
35 scmutil,
35 scmutil,
36 sslutil,
36 sslutil,
37 streamclone,
37 streamclone,
38 url as urlmod,
38 url as urlmod,
39 util,
39 util,
40 )
40 )
41 from .utils import (
41 from .utils import (
42 stringutil,
42 stringutil,
43 )
43 )
44
44
# Shorthand aliases for the urllib error/request compatibility wrappers
# exposed by util.
urlerr = util.urlerr
urlreq = util.urlreq

# Maps bundle version human names to changegroup versions.
_bundlespeccgversions = {'v1': '01',
                         'v2': '02',
                         'packed1': 's1',
                         'bundle2': '02', #legacy
                        }

# Maps bundle version with content opts to choose which part to bundle
_bundlespeccontentopts = {
    'v1': {
        'changegroup': True,
        'cg.version': '01',
        'obsolescence': False,
        'phases': False,
        'tagsfnodescache': False,
        'revbranchcache': False
    },
    'v2': {
        'changegroup': True,
        'cg.version': '02',
        'obsolescence': False,
        'phases': False,
        'tagsfnodescache': True,
        'revbranchcache': True
    },
    'packed1' : {
        'cg.version': 's1'
    }
}
# 'bundle2' is a legacy alias: it shares the 'v2' content options dict.
_bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']

# Variant overlays applied on top of the per-version content options by
# parsebundlespec() (e.g. "stream=v2" disables changegroup generation in
# favor of a stream2 part).
_bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
                                    "tagsfnodescache": False,
                                    "revbranchcache": False}}

# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
85
85
@attr.s
class bundlespec(object):
    """Parsed components of a bundle specification string.

    Produced by parsebundlespec().  Attribute values are either the
    human-centric names from the spec string or their internal
    representations, depending on the ``externalnames`` parsing flag.
    """
    # compression engine name (or internal bundle type)
    compression = attr.ib()
    # bundle/changegroup version identifier
    version = attr.ib()
    # dict of URI-decoded key=value parameters from the spec string
    params = attr.ib()
    # dict of content options controlling which parts get bundled
    contentopts = attr.ib()
92
92
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a bundlespec object of (compression, version, parameters).
    Compression will be ``None`` if not in strict mode and a compression isn't
    defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        """Split "version;key=value;..." into (version, params dict)."""
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            # Keys and values are transmitted URI encoded.
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params


    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Full "<compression>-<type>" form.
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    # Compute contentopts based on the version
    contentopts = _bundlespeccontentopts.get(version, {}).copy()

    # Process the variants
    if "stream" in params and params["stream"] == "v2":
        variant = _bundlespecvariants["streamv2"]
        contentopts.update(variant)

    if not externalnames:
        # Convert human-centric names to internal representations.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]

    return bundlespec(compression, version, params, contentopts)
224
224
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler object for the bundle starting in ``fh``.

    The first 4 bytes of the stream select the format: a cg1unpacker,
    a bundle2 unbundler, or a stream clone applier is returned
    accordingly.  ``error.Abort`` is raised for unrecognized data.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A headerless stream is treated as an uncompressed HG10 bundle.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)

    if version == '10':
        # For HG10, the 2-byte compression algorithm follows the header
        # unless it was already fixed up above.
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
252
252
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal bundle compression type to its human-centric
        # bundlespec name, or None if the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        # presumably '_truncatedBZ' marks a BZ stream whose magic was
        # already consumed; report it as plain 'BZ' — TODO confirm
        if alg == '_truncatedBZ':
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        # Infer the bundle version from the parts present.
        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))
            elif part.type == 'stream2' and version is None:
                # A stream2 part requires to be part of a v2 bundle
                version = "v2"
                requirements = urlreq.unquote(part.params['requirements'])
                splitted = requirements.split()
                params = bundle2._formatrequirementsparams(splitted)
                return 'none-v2;stream=v2;%s' % params

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        # Legacy stream clone bundle: requirements come from its header.
        requirements = streamclone.readbundle1header(fh)[2]
        formatted = bundle2._formatrequirementsparams(requirements)
        return 'none-packed1;%s' % formatted
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
312
312
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if common:
        # Only keep common nodes the local changelog actually knows about.
        common = [n for n in common if cl.hasnode(n)]
    else:
        common = [nullid]
    # Default to all local heads when no heads were requested.
    return discovery.outgoing(repo, common, heads or cl.heads())
331
331
332 def _forcebundle1(op):
332 def _forcebundle1(op):
333 """return true if a pull/push must use bundle1
333 """return true if a pull/push must use bundle1
334
334
335 This function is used to allow testing of the older bundle version"""
335 This function is used to allow testing of the older bundle version"""
336 ui = op.repo.ui
336 ui = op.repo.ui
337 # The goal is this config is to allow developer to choose the bundle
337 # The goal is this config is to allow developer to choose the bundle
338 # version used during exchanged. This is especially handy during test.
338 # version used during exchanged. This is especially handy during test.
339 # Value is a list of bundle version to be picked from, highest version
339 # Value is a list of bundle version to be picked from, highest version
340 # should be used.
340 # should be used.
341 #
341 #
342 # developer config: devel.legacy.exchange
342 # developer config: devel.legacy.exchange
343 exchange = ui.configlist('devel', 'legacy.exchange')
343 exchange = ui.configlist('devel', 'legacy.exchange')
344 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
344 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
345 return forcebundle1 or not op.remote.capable('bundle2')
345 return forcebundle1 or not op.remote.capable('bundle2')
346
346
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        """Initialize push state; most fields are filled in as the push
        progresses (discovery, bundle generation, response handling)."""
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message used when pushing bookmark
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
473
473
474
474
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()

    NOTE(review): this function actually returns the pushoperation object;
    the integers described above are presumably carried on it (cgresult) --
    confirm against callers.
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    # a local destination must support every requirement of this repo
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    # run every push step under whichever locks/transaction we managed to
    # acquire above (any may be None, hence the nullcontextmanager fallback)
    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
539
539
# ordered list of discovery step names, run before every push
pushdiscoveryorder = []

# step name -> implementing function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}

def pushdiscovery(stepname):
    """Decorator registering a function as the push-discovery step *stepname*.

    The function is recorded in the name -> function mapping and its name is
    appended to the ordered step list, so registration order can matter.

    Use this only for brand new steps; to wrap a step from an extension,
    mutate the pushdiscovery dictionary directly.
    """
    def register(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register
563
563
def _pushdiscovery(pushop):
    """Run every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
569
569
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changesets that need to be pushed"""
    kwargs = {'force': pushop.force}
    # when specific revs are requested, only consider their ancestors
    if pushop.revs:
        kwargs['ancestorsof'] = pushop.revs
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             **kwargs)
    common, inc, remoteheads = commoninc
    outgoing = discovery.findcommonoutgoing(pushop.repo, pushop.remote,
                                            onlyheads=pushop.revs,
                                            commoninc=commoninc,
                                            force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
586
586
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)

    Fills ``pushop.outdatedphases`` (heads to turn public if the changeset
    push succeeds) and ``pushop.fallbackoutdatedphases`` (heads to turn
    public even if it does not).
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(pushop.repo,
                                                     pushop.fallbackheads,
                                                     remotephases)
    droots = pushop.remotephases.draftroots

    extracond = ''
    if not pushop.remotephases.publishing:
        # non-publishing server: only already-public changesets may move
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                       outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
638
638
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect the obsolescence markers relevant to the pushed set"""
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
649
649
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and queue the needed moves"""
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    pushop.ui.debug("checking for updated bookmarks\n")

    # restrict bookmark moves to ancestors of the pushed revs, if any
    ancestors = ()
    if pushop.revs:
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)

    remotebookmark = bookmod.unhexlifybookmarks(remote.listkeys('bookmarks'))

    explicit = set(repo._bookmarks.expandname(mark)
                   for mark in pushop.bookmarks)

    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # comparison entries carry None when one side lacks the bookmark
        return x if x is None else hex(x)

    comp = [[(b, safehex(scid), safehex(dcid)) for (b, scid, dcid) in marks]
            for marks in comp]
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
679
679
680 def _processcompared(pushop, pushed, explicit, remotebms, comp):
680 def _processcompared(pushop, pushed, explicit, remotebms, comp):
681 """take decision on bookmark to pull from the remote bookmark
681 """take decision on bookmark to pull from the remote bookmark
682
682
683 Exist to help extensions who want to alter this behavior.
683 Exist to help extensions who want to alter this behavior.
684 """
684 """
685 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
685 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
686
686
687 repo = pushop.repo
687 repo = pushop.repo
688
688
689 for b, scid, dcid in advsrc:
689 for b, scid, dcid in advsrc:
690 if b in explicit:
690 if b in explicit:
691 explicit.remove(b)
691 explicit.remove(b)
692 if not pushed or repo[scid].rev() in pushed:
692 if not pushed or repo[scid].rev() in pushed:
693 pushop.outbookmarks.append((b, dcid, scid))
693 pushop.outbookmarks.append((b, dcid, scid))
694 # search added bookmark
694 # search added bookmark
695 for b, scid, dcid in addsrc:
695 for b, scid, dcid in addsrc:
696 if b in explicit:
696 if b in explicit:
697 explicit.remove(b)
697 explicit.remove(b)
698 pushop.outbookmarks.append((b, '', scid))
698 pushop.outbookmarks.append((b, '', scid))
699 # search for overwritten bookmark
699 # search for overwritten bookmark
700 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
700 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
701 if b in explicit:
701 if b in explicit:
702 explicit.remove(b)
702 explicit.remove(b)
703 pushop.outbookmarks.append((b, dcid, scid))
703 pushop.outbookmarks.append((b, dcid, scid))
704 # search for bookmark to delete
704 # search for bookmark to delete
705 for b, scid, dcid in adddst:
705 for b, scid, dcid in adddst:
706 if b in explicit:
706 if b in explicit:
707 explicit.remove(b)
707 explicit.remove(b)
708 # treat as "deleted locally"
708 # treat as "deleted locally"
709 pushop.outbookmarks.append((b, dcid, ''))
709 pushop.outbookmarks.append((b, dcid, ''))
710 # identical bookmarks shouldn't get reported
710 # identical bookmarks shouldn't get reported
711 for b, scid, dcid in same:
711 for b, scid, dcid in same:
712 if b in explicit:
712 if b in explicit:
713 explicit.remove(b)
713 explicit.remove(b)
714
714
715 if explicit:
715 if explicit:
716 explicit = sorted(explicit)
716 explicit = sorted(explicit)
717 # we should probably list all of them
717 # we should probably list all of them
718 pushop.ui.warn(_('bookmark %s does not exist on the local '
718 pushop.ui.warn(_('bookmark %s does not exist on the local '
719 'or remote repository!\n') % explicit[0])
719 'or remote repository!\n') % explicit[0])
720 pushop.bkresult = 2
720 pushop.bkresult = 2
721
721
722 pushop.outbookmarks.sort()
722 pushop.outbookmarks.sort()
723
723
def _pushcheckoutgoing(pushop):
    """Validate the outgoing set; return True when there is something to push.

    Aborts if the push would publish obsolete or unstable changesets and
    runs the usual new-head checks, unless the push is forced.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if pushop.force:
        return True
    # if repo.obsstore == False --> no obsolete, so skip the iteration
    if unfi.obsstore:
        # these bindings exist for 80 char limit reasons
        mso = _("push includes obsolete changeset: %s!")
        mst = {"orphan": _("push includes orphan changeset: %s!"),
               "phase-divergent":
                   _("push includes phase-divergent changeset: %s!"),
               "content-divergent":
                   _("push includes content-divergent changeset: %s!")}
        # If there is at least one obsolete or unstable changeset in
        # missing, at least one of the missing heads will be obsolete or
        # unstable too, so checking heads only is enough.
        for node in outgoing.missingheads:
            ctx = unfi[node]
            if ctx.obsolete():
                raise error.Abort(mso % ctx)
            if ctx.isunstable():
                # TODO print more than one instability in the abort message
                raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
758
758
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# step name -> part-generating function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """Decorator registering a bundle2 part generator under *stepname*.

    Generators run in registration order unless ``idx`` pins a position in
    the ordered step list.

    Use this only for brand new steps; to wrap a step from an extension,
    mutate the b2partsgenmapping dictionary directly.
    """
    def register(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return register
785
785
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # 'force' skips the push-race check, and when nothing is pushed there is
    # nothing to check either.
    if pushop.force or not pushop.outgoing.missingheads:
        return
    allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
    emptyremote = pushop.pushbranchmap is None
    if not allowunrelated or emptyremote:
        bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        return
    # only remote heads this push will actually move need checking
    affected = set()
    for branch, heads in pushop.pushbranchmap.iteritems():
        remoteheads, newheads, unsyncedheads, discardedheads = heads
        if remoteheads is not None:
            remote = set(remoteheads)
            affected |= set(discardedheads) & remote
            affected |= remote - set(newheads)
    if affected:
        bundler.newpart('check:updated-heads', data=iter(sorted(affected)))
809
809
810 def _pushing(pushop):
810 def _pushing(pushop):
811 """return True if we are pushing anything"""
811 """return True if we are pushing anything"""
812 return bool(pushop.outgoing.missing
812 return bool(pushop.outgoing.missing
813 or pushop.outdatedphases
813 or pushop.outdatedphases
814 or pushop.outobsmarkers
814 or pushop.outobsmarkers
815 or pushop.outbookmarks)
815 or pushop.outbookmarks)
816
816
@b2partsgenerator('check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'bookmarks' not in b2caps:
        return
    if not pushop.outbookmarks:
        return
    # send (name, expected old node) pairs so the server can detect races
    data = [(book, bin(old)) for book, old, new in pushop.outbookmarks]
    bundler.newpart('check:bookmarks', data=bookmod.binaryencode(data))
832
832
@b2partsgenerator('check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = 'heads' in b2caps.get('phases', ())
    if pushop.remotephases is None or not hasphaseheads:
        return
    # check that the remote phase has not changed
    checks = [[] for p in phases.allphases]
    checks[phases.public].extend(pushop.remotephases.publicheads)
    checks[phases.draft].extend(pushop.remotephases.draftroots)
    if any(checks):
        for nodes in checks:
            nodes.sort()
        bundler.newpart('check:phases', data=phases.binaryencode(checks))
850
850
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.

    Returns a reply handler that extracts that result from the server's
    reply bundle (or None when the step is skipped).
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    # '01' is the fallback when the server advertises no usable version
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
890
890
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Prefers the binary phase-heads part; falls back to pushkey parts when
    the server lacks it or legacy exchange is forced through config.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
    hasphaseheads = 'heads' in b2caps.get('phases', ())

    if hasphaseheads and not legacyphase:
        return _pushb2phaseheads(pushop, bundler)
    if 'pushkey' in b2caps:
        return _pushb2phasespushkey(pushop, bundler)
907
907
def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part"""
    pushop.stepsdone.add('phases')
    if not pushop.outdatedphases:
        return
    # index 0 is phases.public: every outdated head becomes public
    updates = [[] for p in phases.allphases]
    updates[0].extend(h.node() for h in pushop.outdatedphases)
    bundler.newpart('phase-heads', data=phases.binaryencode(updates))
916
916
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part

    One pushkey part is emitted per head to publish; a failure callback is
    registered for each so an error can name the offending node.  Returns a
    reply handler that reports ignored/failed updates.
    """
    pushop.stepsdone.add('phases')
    # (part id, node) pairs, so replies/failures can be matched to nodes
    part2node = []

    def handlefailure(pushop, exc):
        # map the failing part id back to the node it was publishing
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # warn (but do not abort) when the server ignored or refused a move
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
951
951
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    # an earlier step may already have handled obsolescence markers
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no marker format is shared with the remote: nothing we can send
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
963
963
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)

    # a devel knob can force the legacy (pushkey-based) bookmark exchange
    legacybooks = 'bookmarks' in pushop.repo.ui.configlist(
        'devel', 'legacy.exchange')

    if 'bookmarks' in b2caps and not legacybooks:
        return _pushb2bookmarkspart(pushop, bundler)
    if 'pushkey' in b2caps:
        return _pushb2bookmarkspushkey(pushop, bundler)
978
978
979 def _bmaction(old, new):
979 def _bmaction(old, new):
980 """small utility for bookmark pushing"""
980 """small utility for bookmark pushing"""
981 if not old:
981 if not old:
982 return 'export'
982 return 'export'
983 elif not new:
983 elif not new:
984 return 'delete'
984 return 'delete'
985 return 'update'
985 return 'update'
986
986
def _pushb2bookmarkspart(pushop, bundler):
    pushop.stepsdone.add('bookmarks')
    if not pushop.outbookmarks:
        return

    # collect binary entries for the part and the matching user messages
    actions = []
    entries = []
    for book, old, new in pushop.outbookmarks:
        new = bin(new)
        entries.append((book, new))
        actions.append((book, _bmaction(old, new)))
    bundler.newpart('bookmarks', data=bookmod.binaryencode(entries))

    def handlereply(op):
        # the part is processed as a whole; reaching here means success
        for book, action in actions:
            pushop.ui.status(bookmsgmap[action][0] % book)

    return handlereply
1008
1008
def _pushb2bookmarkspushkey(pushop, bundler):
    pushop.stepsdone.add('bookmarks')
    sentparts = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # find the bookmark behind the failing part and abort accordingly
        failedid = int(exc.partid)
        for partid, book, action in sentparts:
            if partid == failedid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        sentparts.append((part.id, book, _bmaction(old, new)))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in sentparts:
            results = op.records.getreplies(partid)['pushkey']
            assert len(results) <= 1
            if not results:
                ui.warn(_('server ignored bookmark %s update\n') % book)
            elif int(results[0]['return']):
                ui.status(bookmsgmap[action][0] % book)
            else:
                ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
1053
1053
@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    pushvars = pushop.pushvars
    if not pushvars:
        return

    # parse "KEY=VALUE" strings into a mapping, aborting on bad input
    shellvars = {}
    for raw in pushvars:
        if '=' not in raw:
            msg = ("unable to parse variable '%s', should follow "
                   "'KEY=VALUE' or 'KEY=' format")
            raise error.Abort(msg % raw)
        k, v = raw.split('=', 1)
        shellvars[k] = v

    part = bundler.newpart('pushvars')

    # advisory params: an old server simply ignores them
    for key, value in shellvars.iteritems():
        part.addparam(key, value, mandatory=False)
1072
1072
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # advertise our reply capabilities so the server can answer in kind
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback,
                                                      role='client'))
    bundler.newpart('replycaps', data=capsblob)

    # let each registered part generator contribute parts, keeping any
    # reply handler it returns
    replyhandlers = []
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # only the mandatory 'replycaps' part exists: nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            with pushop.remote.commandexecutor() as e:
                reply = e.callcommand('unbundle', {
                    'bundle': stream,
                    'heads': ['force'],
                    'url': pushop.remote.url(),
                }).result()
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # the server aborted while processing one of our parts
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        # dispatch pushkey failures to the callback registered for the part
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
1126
1126
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local; when everything is pushed and nothing
    # is filtered or excluded, no race is possible and the fast path applies
    fastpath = (pushop.revs is None
                and not outgoing.excluded
                and not pushop.repo.changelog.filteredrevs)
    if fastpath:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    remoteheads = ['force'] if pushop.force else pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
1166
1166
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        pheads, droots = phases.analyzeremotephases(pushop.repo, cheads,
                                                    remotephases)
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            with pushop.remote.commandexecutor() as e:
                r = e.callcommand('pushkey', {
                    'namespace': 'phases',
                    'key': newremotehead.hex(),
                    'old': '%d' % phases.draft,
                    'new': '%d' % phases.public
                }).result()

            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1226
1226
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # The repo is not locked, so we must not change any phases. Instead,
    # inform the user which local phase updates were skipped, if any.
    wouldmove = [n for n in nodes if phase < pushop.repo[n].phase()]
    phasestr = phases.phasenames[phase]
    if wouldmove:
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1243
1243
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    results = []
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    # reverse sort to ensure we end with dump0
    for key in sorted(remotedata, reverse=True):
        results.append(remote.pushkey('obsolete', key, '', remotedata[key]))
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
1262
1262
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        action = _bmaction(old, new)

        with remote.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': 'bookmarks',
                'key': b,
                'old': old,
                'new': new,
            }).result()

        if r:
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value form invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
1293
1293
class pulloperation(object):
    """An object representing a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmarks pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of changesets common to local and remote before the pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # steps already performed
        self.stepsdone = set()
        # whether we attempted a clone from pre-generated bundles
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        if self.heads is not None:
            # an explicit subset was requested: sync on exactly that
            return self.heads
        # everything possible was pulled: sync on everything common plus
        # any remote head not already known locally
        known = set(self.common)
        subset = list(self.common)
        subset.extend(n for n in self.rheads if n not in known)
        return subset

    @util.propertycache
    def canusebundle2(self):
        # bundle2 unless explicitly forced back to bundle1
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1364
1364
class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction.

    It creates the transaction on demand and calls the appropriate hooks
    when closing the transaction."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # lazily-created transaction; None until first requested
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
1394
1394
def _fullpullbundle2(repo, pullop):
    # The server may send a partial reply, i.e. when inlining
    # pre-computed bundles. In that case, update the common
    # set based on the results and pull another bundle.
    #
    # There are two indicators that the process is finished:
    # - no changeset has been added, or
    # - all remote heads are known locally.
    # The head check must use the unfiltered view as obsoletion
    # markers can hide heads.
    unfi = repo.unfiltered()
    unficl = unfi.changelog

    def headsofdiff(h1, h2):
        """Returns heads(h1 % h2)"""
        ctxs = unfi.set('heads(%ln %% %ln)', h1, h2)
        return {ctx.node() for ctx in ctxs}

    def headsofunion(h1, h2):
        """Returns heads((h1 + h2) - null)"""
        ctxs = unfi.set('heads((%ln + %ln - null))', h1, h2)
        return {ctx.node() for ctx in ctxs}

    while True:
        old_heads = unficl.heads()
        clstart = len(unficl)
        _pullbundle2(pullop)
        if changegroup.NARROW_REQUIREMENT in repo.requirements:
            # XXX narrow clones filter the heads on the server side during
            # XXX getbundle and result in partial replies as well.
            # XXX Disable pull bundles in this case as band aid to avoid
            # XXX extra round trips.
            break
        if clstart == len(unficl):
            # nothing was added: the reply was complete
            break
        if all(unficl.hasnode(n) for n in pullop.rheads):
            # every remote head is now known locally
            break
        new_heads = headsofdiff(unficl.heads(), old_heads)
        pullop.common = headsofunion(new_heads, pullop.common)
        pullop.rheads = set(pullop.rheads) - pullop.common
1432
1432
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           **pycompat.strkwargs(opargs))

    # Refuse to pull from a repository whose requirements we do not
    # understand (local peers only; remote peers cannot expose this).
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
    with repo.wlock(), repo.lock(), pullop.trmanager:
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        streamclone.maybeperformlegacystreamclone(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _fullpullbundle2(repo, pullop)
        # The _pull* helpers below are no-ops for any step already marked
        # done by the bundle2 exchange above.
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)

        # storing remotenames
        if repo.ui.configbool('experimental', 'remotenames'):
            logexchange.pullremotenames(repo, remote)

    return pullop
1489
1489
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly.
    """
    def dec(func):
        # registering the same step twice is a programming error
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return dec
1513
1513
def _pulldiscovery(pullop):
    """Run all registered discovery steps, in registration order.

    Steps come from the module-level ``pulldiscoveryorder`` /
    ``pulldiscoverymapping`` registries populated by the ``pulldiscovery``
    decorator.
    """
    for stepname in pulldiscoveryorder:
        step = pulldiscoverymapping[stepname]
        step(pullop)
1519
1519
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # bookmarks already known (e.g. supplied by the caller)
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    books = pullop.remote.listkeys('bookmarks')
    pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1534
1534
1535
1535
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote heads is filtered locally, put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
        if set(rheads).issubset(set(common)):
            # everything the remote has is already known: nothing to fetch
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1568
1568
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads

    if streaming:
        kwargs['cg'] = False
        kwargs['stream'] = True
        pullop.stepsdone.add('changegroup')
        pullop.stepsdone.add('phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add('changegroup')

        kwargs['cg'] = pullop.fetch

        legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
        hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
        if (not legacyphase and hasbinaryphase):
            kwargs['phases'] = True
            pullop.stepsdone.add('phases')

        if 'listkeys' in pullop.remotebundle2caps:
            if 'phases' not in pullop.stepsdone:
                kwargs['listkeys'] = ['phases']

    bookmarksrequested = False
    legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
    hasbinarybook = 'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        pullop.stepsdone.add('request-bookmarks')

    if ('request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark and hasbinarybook):
        kwargs['bookmarks'] = True
        bookmarksrequested = True

    if 'listkeys' in pullop.remotebundle2caps:
        if 'request-bookmarks' not in pullop.stepsdone:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add('request-bookmarks')
            kwargs.setdefault('listkeys', []).append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)

    # Issue the request through the peer's command executor (this is the
    # modern replacement for calling pullop.remote.getbundle() directly).
    with pullop.remote.commandexecutor() as e:
        args = dict(kwargs)
        args['source'] = 'pull'
        bundle = e.callcommand('getbundle', args).result()

    try:
        op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction,
                                     source='pull')
        op.modes['bookmarks'] = 'records'
        bundle2.processbundle(pullop.repo, bundle, op=op)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        books = {}
        for record in op.records['bookmarks']:
            books[record['bookmark']] = record["node"]
        pullop.remotebookmarks = books
    else:
        for namespace, value in op.records['listkeys']:
            if namespace == 'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1685
1690
1686 def _pullbundle2extraprepare(pullop, kwargs):
1691 def _pullbundle2extraprepare(pullop, kwargs):
1687 """hook function so that extensions can extend the getbundle call"""
1692 """hook function so that extensions can extend the getbundle call"""
1688
1693
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand('changegroup', {
                'nodes': pullop.fetch,
                'source': 'pull',
            }).result()

    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand('changegroupsubset', {
                'bases': pullop.fetch,
                'heads': pullop.heads,
                'source': 'pull',
            }).result()

    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1734
1739
1735 def _pullphase(pullop):
1740 def _pullphase(pullop):
1736 # Get remote phases data from remote
1741 # Get remote phases data from remote
1737 if 'phases' in pullop.stepsdone:
1742 if 'phases' in pullop.stepsdone:
1738 return
1743 return
1739 remotephases = pullop.remote.listkeys('phases')
1744 remotephases = pullop.remote.listkeys('phases')
1740 _pullapplyphases(pullop, remotephases)
1745 _pullapplyphases(pullop, remotephases)
1741
1746
1742 def _pullapplyphases(pullop, remotephases):
1747 def _pullapplyphases(pullop, remotephases):
1743 """apply phase movement from observed remote state"""
1748 """apply phase movement from observed remote state"""
1744 if 'phases' in pullop.stepsdone:
1749 if 'phases' in pullop.stepsdone:
1745 return
1750 return
1746 pullop.stepsdone.add('phases')
1751 pullop.stepsdone.add('phases')
1747 publishing = bool(remotephases.get('publishing', False))
1752 publishing = bool(remotephases.get('publishing', False))
1748 if remotephases and not publishing:
1753 if remotephases and not publishing:
1749 # remote is new and non-publishing
1754 # remote is new and non-publishing
1750 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1755 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1751 pullop.pulledsubset,
1756 pullop.pulledsubset,
1752 remotephases)
1757 remotephases)
1753 dheads = pullop.pulledsubset
1758 dheads = pullop.pulledsubset
1754 else:
1759 else:
1755 # Remote is old or publishing all common changesets
1760 # Remote is old or publishing all common changesets
1756 # should be seen as public
1761 # should be seen as public
1757 pheads = pullop.pulledsubset
1762 pheads = pullop.pulledsubset
1758 dheads = []
1763 dheads = []
1759 unfi = pullop.repo.unfiltered()
1764 unfi = pullop.repo.unfiltered()
1760 phase = unfi._phasecache.phase
1765 phase = unfi._phasecache.phase
1761 rev = unfi.changelog.nodemap.get
1766 rev = unfi.changelog.nodemap.get
1762 public = phases.public
1767 public = phases.public
1763 draft = phases.draft
1768 draft = phases.draft
1764
1769
1765 # exclude changesets already public locally and update the others
1770 # exclude changesets already public locally and update the others
1766 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1771 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1767 if pheads:
1772 if pheads:
1768 tr = pullop.gettransaction()
1773 tr = pullop.gettransaction()
1769 phases.advanceboundary(pullop.repo, tr, public, pheads)
1774 phases.advanceboundary(pullop.repo, tr, public, pheads)
1770
1775
1771 # exclude changesets already draft locally and update the others
1776 # exclude changesets already draft locally and update the others
1772 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1777 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1773 if dheads:
1778 if dheads:
1774 tr = pullop.gettransaction()
1779 tr = pullop.gettransaction()
1775 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1780 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1776
1781
1777 def _pullbookmarks(pullop):
1782 def _pullbookmarks(pullop):
1778 """process the remote bookmark information to update the local one"""
1783 """process the remote bookmark information to update the local one"""
1779 if 'bookmarks' in pullop.stepsdone:
1784 if 'bookmarks' in pullop.stepsdone:
1780 return
1785 return
1781 pullop.stepsdone.add('bookmarks')
1786 pullop.stepsdone.add('bookmarks')
1782 repo = pullop.repo
1787 repo = pullop.repo
1783 remotebookmarks = pullop.remotebookmarks
1788 remotebookmarks = pullop.remotebookmarks
1784 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1789 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1785 pullop.remote.url(),
1790 pullop.remote.url(),
1786 pullop.gettransaction,
1791 pullop.gettransaction,
1787 explicit=pullop.explicitbookmarks)
1792 explicit=pullop.explicitbookmarks)
1788
1793
1789 def _pullobsolete(pullop):
1794 def _pullobsolete(pullop):
1790 """utility function to pull obsolete markers from a remote
1795 """utility function to pull obsolete markers from a remote
1791
1796
1792 The `gettransaction` is function that return the pull transaction, creating
1797 The `gettransaction` is function that return the pull transaction, creating
1793 one if necessary. We return the transaction to inform the calling code that
1798 one if necessary. We return the transaction to inform the calling code that
1794 a new transaction have been created (when applicable).
1799 a new transaction have been created (when applicable).
1795
1800
1796 Exists mostly to allow overriding for experimentation purpose"""
1801 Exists mostly to allow overriding for experimentation purpose"""
1797 if 'obsmarkers' in pullop.stepsdone:
1802 if 'obsmarkers' in pullop.stepsdone:
1798 return
1803 return
1799 pullop.stepsdone.add('obsmarkers')
1804 pullop.stepsdone.add('obsmarkers')
1800 tr = None
1805 tr = None
1801 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1806 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1802 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1807 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1803 remoteobs = pullop.remote.listkeys('obsolete')
1808 remoteobs = pullop.remote.listkeys('obsolete')
1804 if 'dump0' in remoteobs:
1809 if 'dump0' in remoteobs:
1805 tr = pullop.gettransaction()
1810 tr = pullop.gettransaction()
1806 markers = []
1811 markers = []
1807 for key in sorted(remoteobs, reverse=True):
1812 for key in sorted(remoteobs, reverse=True):
1808 if key.startswith('dump'):
1813 if key.startswith('dump'):
1809 data = util.b85decode(remoteobs[key])
1814 data = util.b85decode(remoteobs[key])
1810 version, newmarks = obsolete._readmarkers(data)
1815 version, newmarks = obsolete._readmarkers(data)
1811 markers += newmarks
1816 markers += newmarks
1812 if markers:
1817 if markers:
1813 pullop.repo.obsstore.add(tr, markers)
1818 pullop.repo.obsstore.add(tr, markers)
1814 pullop.repo.invalidatevolatilesets()
1819 pullop.repo.invalidatevolatilesets()
1815 return tr
1820 return tr
1816
1821
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle

    ``role`` identifies which side of the exchange we are acting as
    ('client' or 'server') when computing the advertised bundle2 caps.
    """
    caps = {'HG20'}
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    caps.add('bundle2=' + urlreq.quote(capsblob))
    return caps
1823
1828
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}

def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter). ``idx`` may be used to insert the step at a specific
    position instead of appending it.

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly.
    """
    def dec(func):
        # registering the same step twice is a programming error
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return dec
1850
1855
1851 def bundle2requested(bundlecaps):
1856 def bundle2requested(bundlecaps):
1852 if bundlecaps is not None:
1857 if bundlecaps is not None:
1853 return any(cap.startswith('HG2') for cap in bundlecaps)
1858 return any(cap.startswith('HG2') for cap in bundlecaps)
1854 return False
1859 return False
1855
1860
1856 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1861 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1857 **kwargs):
1862 **kwargs):
1858 """Return chunks constituting a bundle's raw data.
1863 """Return chunks constituting a bundle's raw data.
1859
1864
1860 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1865 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1861 passed.
1866 passed.
1862
1867
1863 Returns a 2-tuple of a dict with metadata about the generated bundle
1868 Returns a 2-tuple of a dict with metadata about the generated bundle
1864 and an iterator over raw chunks (of varying sizes).
1869 and an iterator over raw chunks (of varying sizes).
1865 """
1870 """
1866 kwargs = pycompat.byteskwargs(kwargs)
1871 kwargs = pycompat.byteskwargs(kwargs)
1867 info = {}
1872 info = {}
1868 usebundle2 = bundle2requested(bundlecaps)
1873 usebundle2 = bundle2requested(bundlecaps)
1869 # bundle10 case
1874 # bundle10 case
1870 if not usebundle2:
1875 if not usebundle2:
1871 if bundlecaps and not kwargs.get('cg', True):
1876 if bundlecaps and not kwargs.get('cg', True):
1872 raise ValueError(_('request for bundle10 must include changegroup'))
1877 raise ValueError(_('request for bundle10 must include changegroup'))
1873
1878
1874 if kwargs:
1879 if kwargs:
1875 raise ValueError(_('unsupported getbundle arguments: %s')
1880 raise ValueError(_('unsupported getbundle arguments: %s')
1876 % ', '.join(sorted(kwargs.keys())))
1881 % ', '.join(sorted(kwargs.keys())))
1877 outgoing = _computeoutgoing(repo, heads, common)
1882 outgoing = _computeoutgoing(repo, heads, common)
1878 info['bundleversion'] = 1
1883 info['bundleversion'] = 1
1879 return info, changegroup.makestream(repo, outgoing, '01', source,
1884 return info, changegroup.makestream(repo, outgoing, '01', source,
1880 bundlecaps=bundlecaps)
1885 bundlecaps=bundlecaps)
1881
1886
1882 # bundle20 case
1887 # bundle20 case
1883 info['bundleversion'] = 2
1888 info['bundleversion'] = 2
1884 b2caps = {}
1889 b2caps = {}
1885 for bcaps in bundlecaps:
1890 for bcaps in bundlecaps:
1886 if bcaps.startswith('bundle2='):
1891 if bcaps.startswith('bundle2='):
1887 blob = urlreq.unquote(bcaps[len('bundle2='):])
1892 blob = urlreq.unquote(bcaps[len('bundle2='):])
1888 b2caps.update(bundle2.decodecaps(blob))
1893 b2caps.update(bundle2.decodecaps(blob))
1889 bundler = bundle2.bundle20(repo.ui, b2caps)
1894 bundler = bundle2.bundle20(repo.ui, b2caps)
1890
1895
1891 kwargs['heads'] = heads
1896 kwargs['heads'] = heads
1892 kwargs['common'] = common
1897 kwargs['common'] = common
1893
1898
1894 for name in getbundle2partsorder:
1899 for name in getbundle2partsorder:
1895 func = getbundle2partsmapping[name]
1900 func = getbundle2partsmapping[name]
1896 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1901 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1897 **pycompat.strkwargs(kwargs))
1902 **pycompat.strkwargs(kwargs))
1898
1903
1899 info['prefercompressed'] = bundler.prefercompressed
1904 info['prefercompressed'] = bundler.prefercompressed
1900
1905
1901 return info, bundler.getchunks()
1906 return info, bundler.getchunks()
1902
1907
1903 @getbundle2partsgenerator('stream2')
1908 @getbundle2partsgenerator('stream2')
1904 def _getbundlestream2(bundler, repo, *args, **kwargs):
1909 def _getbundlestream2(bundler, repo, *args, **kwargs):
1905 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
1910 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
1906
1911
1907 @getbundle2partsgenerator('changegroup')
1912 @getbundle2partsgenerator('changegroup')
1908 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1913 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1909 b2caps=None, heads=None, common=None, **kwargs):
1914 b2caps=None, heads=None, common=None, **kwargs):
1910 """add a changegroup part to the requested bundle"""
1915 """add a changegroup part to the requested bundle"""
1911 cgstream = None
1916 cgstream = None
1912 if kwargs.get(r'cg', True):
1917 if kwargs.get(r'cg', True):
1913 # build changegroup bundle here.
1918 # build changegroup bundle here.
1914 version = '01'
1919 version = '01'
1915 cgversions = b2caps.get('changegroup')
1920 cgversions = b2caps.get('changegroup')
1916 if cgversions: # 3.1 and 3.2 ship with an empty value
1921 if cgversions: # 3.1 and 3.2 ship with an empty value
1917 cgversions = [v for v in cgversions
1922 cgversions = [v for v in cgversions
1918 if v in changegroup.supportedoutgoingversions(repo)]
1923 if v in changegroup.supportedoutgoingversions(repo)]
1919 if not cgversions:
1924 if not cgversions:
1920 raise ValueError(_('no common changegroup version'))
1925 raise ValueError(_('no common changegroup version'))
1921 version = max(cgversions)
1926 version = max(cgversions)
1922 outgoing = _computeoutgoing(repo, heads, common)
1927 outgoing = _computeoutgoing(repo, heads, common)
1923 if outgoing.missing:
1928 if outgoing.missing:
1924 cgstream = changegroup.makestream(repo, outgoing, version, source,
1929 cgstream = changegroup.makestream(repo, outgoing, version, source,
1925 bundlecaps=bundlecaps)
1930 bundlecaps=bundlecaps)
1926
1931
1927 if cgstream:
1932 if cgstream:
1928 part = bundler.newpart('changegroup', data=cgstream)
1933 part = bundler.newpart('changegroup', data=cgstream)
1929 if cgversions:
1934 if cgversions:
1930 part.addparam('version', version)
1935 part.addparam('version', version)
1931 part.addparam('nbchanges', '%d' % len(outgoing.missing),
1936 part.addparam('nbchanges', '%d' % len(outgoing.missing),
1932 mandatory=False)
1937 mandatory=False)
1933 if 'treemanifest' in repo.requirements:
1938 if 'treemanifest' in repo.requirements:
1934 part.addparam('treemanifest', '1')
1939 part.addparam('treemanifest', '1')
1935
1940
1936 @getbundle2partsgenerator('bookmarks')
1941 @getbundle2partsgenerator('bookmarks')
1937 def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
1942 def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
1938 b2caps=None, **kwargs):
1943 b2caps=None, **kwargs):
1939 """add a bookmark part to the requested bundle"""
1944 """add a bookmark part to the requested bundle"""
1940 if not kwargs.get(r'bookmarks', False):
1945 if not kwargs.get(r'bookmarks', False):
1941 return
1946 return
1942 if 'bookmarks' not in b2caps:
1947 if 'bookmarks' not in b2caps:
1943 raise ValueError(_('no common bookmarks exchange method'))
1948 raise ValueError(_('no common bookmarks exchange method'))
1944 books = bookmod.listbinbookmarks(repo)
1949 books = bookmod.listbinbookmarks(repo)
1945 data = bookmod.binaryencode(books)
1950 data = bookmod.binaryencode(books)
1946 if data:
1951 if data:
1947 bundler.newpart('bookmarks', data=data)
1952 bundler.newpart('bookmarks', data=data)
1948
1953
1949 @getbundle2partsgenerator('listkeys')
1954 @getbundle2partsgenerator('listkeys')
1950 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1955 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1951 b2caps=None, **kwargs):
1956 b2caps=None, **kwargs):
1952 """add parts containing listkeys namespaces to the requested bundle"""
1957 """add parts containing listkeys namespaces to the requested bundle"""
1953 listkeys = kwargs.get(r'listkeys', ())
1958 listkeys = kwargs.get(r'listkeys', ())
1954 for namespace in listkeys:
1959 for namespace in listkeys:
1955 part = bundler.newpart('listkeys')
1960 part = bundler.newpart('listkeys')
1956 part.addparam('namespace', namespace)
1961 part.addparam('namespace', namespace)
1957 keys = repo.listkeys(namespace).items()
1962 keys = repo.listkeys(namespace).items()
1958 part.data = pushkey.encodekeys(keys)
1963 part.data = pushkey.encodekeys(keys)
1959
1964
1960 @getbundle2partsgenerator('obsmarkers')
1965 @getbundle2partsgenerator('obsmarkers')
1961 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1966 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1962 b2caps=None, heads=None, **kwargs):
1967 b2caps=None, heads=None, **kwargs):
1963 """add an obsolescence markers part to the requested bundle"""
1968 """add an obsolescence markers part to the requested bundle"""
1964 if kwargs.get(r'obsmarkers', False):
1969 if kwargs.get(r'obsmarkers', False):
1965 if heads is None:
1970 if heads is None:
1966 heads = repo.heads()
1971 heads = repo.heads()
1967 subset = [c.node() for c in repo.set('::%ln', heads)]
1972 subset = [c.node() for c in repo.set('::%ln', heads)]
1968 markers = repo.obsstore.relevantmarkers(subset)
1973 markers = repo.obsstore.relevantmarkers(subset)
1969 markers = sorted(markers)
1974 markers = sorted(markers)
1970 bundle2.buildobsmarkerspart(bundler, markers)
1975 bundle2.buildobsmarkerspart(bundler, markers)
1971
1976
1972 @getbundle2partsgenerator('phases')
1977 @getbundle2partsgenerator('phases')
1973 def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
1978 def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
1974 b2caps=None, heads=None, **kwargs):
1979 b2caps=None, heads=None, **kwargs):
1975 """add phase heads part to the requested bundle"""
1980 """add phase heads part to the requested bundle"""
1976 if kwargs.get(r'phases', False):
1981 if kwargs.get(r'phases', False):
1977 if not 'heads' in b2caps.get('phases'):
1982 if not 'heads' in b2caps.get('phases'):
1978 raise ValueError(_('no common phases exchange method'))
1983 raise ValueError(_('no common phases exchange method'))
1979 if heads is None:
1984 if heads is None:
1980 heads = repo.heads()
1985 heads = repo.heads()
1981
1986
1982 headsbyphase = collections.defaultdict(set)
1987 headsbyphase = collections.defaultdict(set)
1983 if repo.publishing():
1988 if repo.publishing():
1984 headsbyphase[phases.public] = heads
1989 headsbyphase[phases.public] = heads
1985 else:
1990 else:
1986 # find the appropriate heads to move
1991 # find the appropriate heads to move
1987
1992
1988 phase = repo._phasecache.phase
1993 phase = repo._phasecache.phase
1989 node = repo.changelog.node
1994 node = repo.changelog.node
1990 rev = repo.changelog.rev
1995 rev = repo.changelog.rev
1991 for h in heads:
1996 for h in heads:
1992 headsbyphase[phase(repo, rev(h))].add(h)
1997 headsbyphase[phase(repo, rev(h))].add(h)
1993 seenphases = list(headsbyphase.keys())
1998 seenphases = list(headsbyphase.keys())
1994
1999
1995 # We do not handle anything but public and draft phase for now)
2000 # We do not handle anything but public and draft phase for now)
1996 if seenphases:
2001 if seenphases:
1997 assert max(seenphases) <= phases.draft
2002 assert max(seenphases) <= phases.draft
1998
2003
1999 # if client is pulling non-public changesets, we need to find
2004 # if client is pulling non-public changesets, we need to find
2000 # intermediate public heads.
2005 # intermediate public heads.
2001 draftheads = headsbyphase.get(phases.draft, set())
2006 draftheads = headsbyphase.get(phases.draft, set())
2002 if draftheads:
2007 if draftheads:
2003 publicheads = headsbyphase.get(phases.public, set())
2008 publicheads = headsbyphase.get(phases.public, set())
2004
2009
2005 revset = 'heads(only(%ln, %ln) and public())'
2010 revset = 'heads(only(%ln, %ln) and public())'
2006 extraheads = repo.revs(revset, draftheads, publicheads)
2011 extraheads = repo.revs(revset, draftheads, publicheads)
2007 for r in extraheads:
2012 for r in extraheads:
2008 headsbyphase[phases.public].add(node(r))
2013 headsbyphase[phases.public].add(node(r))
2009
2014
2010 # transform data in a format used by the encoding function
2015 # transform data in a format used by the encoding function
2011 phasemapping = []
2016 phasemapping = []
2012 for phase in phases.allphases:
2017 for phase in phases.allphases:
2013 phasemapping.append(sorted(headsbyphase[phase]))
2018 phasemapping.append(sorted(headsbyphase[phase]))
2014
2019
2015 # generate the actual part
2020 # generate the actual part
2016 phasedata = phases.binaryencode(phasemapping)
2021 phasedata = phases.binaryencode(phasemapping)
2017 bundler.newpart('phase-heads', data=phasedata)
2022 bundler.newpart('phase-heads', data=phasedata)
2018
2023
2019 @getbundle2partsgenerator('hgtagsfnodes')
2024 @getbundle2partsgenerator('hgtagsfnodes')
2020 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
2025 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
2021 b2caps=None, heads=None, common=None,
2026 b2caps=None, heads=None, common=None,
2022 **kwargs):
2027 **kwargs):
2023 """Transfer the .hgtags filenodes mapping.
2028 """Transfer the .hgtags filenodes mapping.
2024
2029
2025 Only values for heads in this bundle will be transferred.
2030 Only values for heads in this bundle will be transferred.
2026
2031
2027 The part data consists of pairs of 20 byte changeset node and .hgtags
2032 The part data consists of pairs of 20 byte changeset node and .hgtags
2028 filenodes raw values.
2033 filenodes raw values.
2029 """
2034 """
2030 # Don't send unless:
2035 # Don't send unless:
2031 # - changeset are being exchanged,
2036 # - changeset are being exchanged,
2032 # - the client supports it.
2037 # - the client supports it.
2033 if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
2038 if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
2034 return
2039 return
2035
2040
2036 outgoing = _computeoutgoing(repo, heads, common)
2041 outgoing = _computeoutgoing(repo, heads, common)
2037 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2042 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2038
2043
2039 @getbundle2partsgenerator('cache:rev-branch-cache')
2044 @getbundle2partsgenerator('cache:rev-branch-cache')
2040 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
2045 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
2041 b2caps=None, heads=None, common=None,
2046 b2caps=None, heads=None, common=None,
2042 **kwargs):
2047 **kwargs):
2043 """Transfer the rev-branch-cache mapping
2048 """Transfer the rev-branch-cache mapping
2044
2049
2045 The payload is a series of data related to each branch
2050 The payload is a series of data related to each branch
2046
2051
2047 1) branch name length
2052 1) branch name length
2048 2) number of open heads
2053 2) number of open heads
2049 3) number of closed heads
2054 3) number of closed heads
2050 4) open heads nodes
2055 4) open heads nodes
2051 5) closed heads nodes
2056 5) closed heads nodes
2052 """
2057 """
2053 # Don't send unless:
2058 # Don't send unless:
2054 # - changeset are being exchanged,
2059 # - changeset are being exchanged,
2055 # - the client supports it.
2060 # - the client supports it.
2056 if not (kwargs.get(r'cg', True)) or 'rev-branch-cache' not in b2caps:
2061 if not (kwargs.get(r'cg', True)) or 'rev-branch-cache' not in b2caps:
2057 return
2062 return
2058 outgoing = _computeoutgoing(repo, heads, common)
2063 outgoing = _computeoutgoing(repo, heads, common)
2059 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2064 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2060
2065
2061 def check_heads(repo, their_heads, context):
2066 def check_heads(repo, their_heads, context):
2062 """check if the heads of a repo have been modified
2067 """check if the heads of a repo have been modified
2063
2068
2064 Used by peer for unbundling.
2069 Used by peer for unbundling.
2065 """
2070 """
2066 heads = repo.heads()
2071 heads = repo.heads()
2067 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
2072 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
2068 if not (their_heads == ['force'] or their_heads == heads or
2073 if not (their_heads == ['force'] or their_heads == heads or
2069 their_heads == ['hashed', heads_hash]):
2074 their_heads == ['hashed', heads_hash]):
2070 # someone else committed/pushed/unbundled while we
2075 # someone else committed/pushed/unbundled while we
2071 # were transferring data
2076 # were transferring data
2072 raise error.PushRaced('repository changed while %s - '
2077 raise error.PushRaced('repository changed while %s - '
2073 'please try again' % context)
2078 'please try again' % context)
2074
2079
2075 def unbundle(repo, cg, heads, source, url):
2080 def unbundle(repo, cg, heads, source, url):
2076 """Apply a bundle to a repo.
2081 """Apply a bundle to a repo.
2077
2082
2078 this function makes sure the repo is locked during the application and have
2083 this function makes sure the repo is locked during the application and have
2079 mechanism to check that no push race occurred between the creation of the
2084 mechanism to check that no push race occurred between the creation of the
2080 bundle and its application.
2085 bundle and its application.
2081
2086
2082 If the push was raced as PushRaced exception is raised."""
2087 If the push was raced as PushRaced exception is raised."""
2083 r = 0
2088 r = 0
2084 # need a transaction when processing a bundle2 stream
2089 # need a transaction when processing a bundle2 stream
2085 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2090 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2086 lockandtr = [None, None, None]
2091 lockandtr = [None, None, None]
2087 recordout = None
2092 recordout = None
2088 # quick fix for output mismatch with bundle2 in 3.4
2093 # quick fix for output mismatch with bundle2 in 3.4
2089 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
2094 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
2090 if url.startswith('remote:http:') or url.startswith('remote:https:'):
2095 if url.startswith('remote:http:') or url.startswith('remote:https:'):
2091 captureoutput = True
2096 captureoutput = True
2092 try:
2097 try:
2093 # note: outside bundle1, 'heads' is expected to be empty and this
2098 # note: outside bundle1, 'heads' is expected to be empty and this
2094 # 'check_heads' call wil be a no-op
2099 # 'check_heads' call wil be a no-op
2095 check_heads(repo, heads, 'uploading changes')
2100 check_heads(repo, heads, 'uploading changes')
2096 # push can proceed
2101 # push can proceed
2097 if not isinstance(cg, bundle2.unbundle20):
2102 if not isinstance(cg, bundle2.unbundle20):
2098 # legacy case: bundle1 (changegroup 01)
2103 # legacy case: bundle1 (changegroup 01)
2099 txnname = "\n".join([source, util.hidepassword(url)])
2104 txnname = "\n".join([source, util.hidepassword(url)])
2100 with repo.lock(), repo.transaction(txnname) as tr:
2105 with repo.lock(), repo.transaction(txnname) as tr:
2101 op = bundle2.applybundle(repo, cg, tr, source, url)
2106 op = bundle2.applybundle(repo, cg, tr, source, url)
2102 r = bundle2.combinechangegroupresults(op)
2107 r = bundle2.combinechangegroupresults(op)
2103 else:
2108 else:
2104 r = None
2109 r = None
2105 try:
2110 try:
2106 def gettransaction():
2111 def gettransaction():
2107 if not lockandtr[2]:
2112 if not lockandtr[2]:
2108 lockandtr[0] = repo.wlock()
2113 lockandtr[0] = repo.wlock()
2109 lockandtr[1] = repo.lock()
2114 lockandtr[1] = repo.lock()
2110 lockandtr[2] = repo.transaction(source)
2115 lockandtr[2] = repo.transaction(source)
2111 lockandtr[2].hookargs['source'] = source
2116 lockandtr[2].hookargs['source'] = source
2112 lockandtr[2].hookargs['url'] = url
2117 lockandtr[2].hookargs['url'] = url
2113 lockandtr[2].hookargs['bundle2'] = '1'
2118 lockandtr[2].hookargs['bundle2'] = '1'
2114 return lockandtr[2]
2119 return lockandtr[2]
2115
2120
2116 # Do greedy locking by default until we're satisfied with lazy
2121 # Do greedy locking by default until we're satisfied with lazy
2117 # locking.
2122 # locking.
2118 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2123 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2119 gettransaction()
2124 gettransaction()
2120
2125
2121 op = bundle2.bundleoperation(repo, gettransaction,
2126 op = bundle2.bundleoperation(repo, gettransaction,
2122 captureoutput=captureoutput,
2127 captureoutput=captureoutput,
2123 source='push')
2128 source='push')
2124 try:
2129 try:
2125 op = bundle2.processbundle(repo, cg, op=op)
2130 op = bundle2.processbundle(repo, cg, op=op)
2126 finally:
2131 finally:
2127 r = op.reply
2132 r = op.reply
2128 if captureoutput and r is not None:
2133 if captureoutput and r is not None:
2129 repo.ui.pushbuffer(error=True, subproc=True)
2134 repo.ui.pushbuffer(error=True, subproc=True)
2130 def recordout(output):
2135 def recordout(output):
2131 r.newpart('output', data=output, mandatory=False)
2136 r.newpart('output', data=output, mandatory=False)
2132 if lockandtr[2] is not None:
2137 if lockandtr[2] is not None:
2133 lockandtr[2].close()
2138 lockandtr[2].close()
2134 except BaseException as exc:
2139 except BaseException as exc:
2135 exc.duringunbundle2 = True
2140 exc.duringunbundle2 = True
2136 if captureoutput and r is not None:
2141 if captureoutput and r is not None:
2137 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2142 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2138 def recordout(output):
2143 def recordout(output):
2139 part = bundle2.bundlepart('output', data=output,
2144 part = bundle2.bundlepart('output', data=output,
2140 mandatory=False)
2145 mandatory=False)
2141 parts.append(part)
2146 parts.append(part)
2142 raise
2147 raise
2143 finally:
2148 finally:
2144 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2149 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2145 if recordout is not None:
2150 if recordout is not None:
2146 recordout(repo.ui.popbuffer())
2151 recordout(repo.ui.popbuffer())
2147 return r
2152 return r
2148
2153
2149 def _maybeapplyclonebundle(pullop):
2154 def _maybeapplyclonebundle(pullop):
2150 """Apply a clone bundle from a remote, if possible."""
2155 """Apply a clone bundle from a remote, if possible."""
2151
2156
2152 repo = pullop.repo
2157 repo = pullop.repo
2153 remote = pullop.remote
2158 remote = pullop.remote
2154
2159
2155 if not repo.ui.configbool('ui', 'clonebundles'):
2160 if not repo.ui.configbool('ui', 'clonebundles'):
2156 return
2161 return
2157
2162
2158 # Only run if local repo is empty.
2163 # Only run if local repo is empty.
2159 if len(repo):
2164 if len(repo):
2160 return
2165 return
2161
2166
2162 if pullop.heads:
2167 if pullop.heads:
2163 return
2168 return
2164
2169
2165 if not remote.capable('clonebundles'):
2170 if not remote.capable('clonebundles'):
2166 return
2171 return
2167
2172
2168 res = remote._call('clonebundles')
2173 res = remote._call('clonebundles')
2169
2174
2170 # If we call the wire protocol command, that's good enough to record the
2175 # If we call the wire protocol command, that's good enough to record the
2171 # attempt.
2176 # attempt.
2172 pullop.clonebundleattempted = True
2177 pullop.clonebundleattempted = True
2173
2178
2174 entries = parseclonebundlesmanifest(repo, res)
2179 entries = parseclonebundlesmanifest(repo, res)
2175 if not entries:
2180 if not entries:
2176 repo.ui.note(_('no clone bundles available on remote; '
2181 repo.ui.note(_('no clone bundles available on remote; '
2177 'falling back to regular clone\n'))
2182 'falling back to regular clone\n'))
2178 return
2183 return
2179
2184
2180 entries = filterclonebundleentries(
2185 entries = filterclonebundleentries(
2181 repo, entries, streamclonerequested=pullop.streamclonerequested)
2186 repo, entries, streamclonerequested=pullop.streamclonerequested)
2182
2187
2183 if not entries:
2188 if not entries:
2184 # There is a thundering herd concern here. However, if a server
2189 # There is a thundering herd concern here. However, if a server
2185 # operator doesn't advertise bundles appropriate for its clients,
2190 # operator doesn't advertise bundles appropriate for its clients,
2186 # they deserve what's coming. Furthermore, from a client's
2191 # they deserve what's coming. Furthermore, from a client's
2187 # perspective, no automatic fallback would mean not being able to
2192 # perspective, no automatic fallback would mean not being able to
2188 # clone!
2193 # clone!
2189 repo.ui.warn(_('no compatible clone bundles available on server; '
2194 repo.ui.warn(_('no compatible clone bundles available on server; '
2190 'falling back to regular clone\n'))
2195 'falling back to regular clone\n'))
2191 repo.ui.warn(_('(you may want to report this to the server '
2196 repo.ui.warn(_('(you may want to report this to the server '
2192 'operator)\n'))
2197 'operator)\n'))
2193 return
2198 return
2194
2199
2195 entries = sortclonebundleentries(repo.ui, entries)
2200 entries = sortclonebundleentries(repo.ui, entries)
2196
2201
2197 url = entries[0]['URL']
2202 url = entries[0]['URL']
2198 repo.ui.status(_('applying clone bundle from %s\n') % url)
2203 repo.ui.status(_('applying clone bundle from %s\n') % url)
2199 if trypullbundlefromurl(repo.ui, repo, url):
2204 if trypullbundlefromurl(repo.ui, repo, url):
2200 repo.ui.status(_('finished applying clone bundle\n'))
2205 repo.ui.status(_('finished applying clone bundle\n'))
2201 # Bundle failed.
2206 # Bundle failed.
2202 #
2207 #
2203 # We abort by default to avoid the thundering herd of
2208 # We abort by default to avoid the thundering herd of
2204 # clients flooding a server that was expecting expensive
2209 # clients flooding a server that was expecting expensive
2205 # clone load to be offloaded.
2210 # clone load to be offloaded.
2206 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2211 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2207 repo.ui.warn(_('falling back to normal clone\n'))
2212 repo.ui.warn(_('falling back to normal clone\n'))
2208 else:
2213 else:
2209 raise error.Abort(_('error applying bundle'),
2214 raise error.Abort(_('error applying bundle'),
2210 hint=_('if this error persists, consider contacting '
2215 hint=_('if this error persists, consider contacting '
2211 'the server operator or disable clone '
2216 'the server operator or disable clone '
2212 'bundles via '
2217 'bundles via '
2213 '"--config ui.clonebundles=false"'))
2218 '"--config ui.clonebundles=false"'))
2214
2219
2215 def parseclonebundlesmanifest(repo, s):
2220 def parseclonebundlesmanifest(repo, s):
2216 """Parses the raw text of a clone bundles manifest.
2221 """Parses the raw text of a clone bundles manifest.
2217
2222
2218 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2223 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2219 to the URL and other keys are the attributes for the entry.
2224 to the URL and other keys are the attributes for the entry.
2220 """
2225 """
2221 m = []
2226 m = []
2222 for line in s.splitlines():
2227 for line in s.splitlines():
2223 fields = line.split()
2228 fields = line.split()
2224 if not fields:
2229 if not fields:
2225 continue
2230 continue
2226 attrs = {'URL': fields[0]}
2231 attrs = {'URL': fields[0]}
2227 for rawattr in fields[1:]:
2232 for rawattr in fields[1:]:
2228 key, value = rawattr.split('=', 1)
2233 key, value = rawattr.split('=', 1)
2229 key = urlreq.unquote(key)
2234 key = urlreq.unquote(key)
2230 value = urlreq.unquote(value)
2235 value = urlreq.unquote(value)
2231 attrs[key] = value
2236 attrs[key] = value
2232
2237
2233 # Parse BUNDLESPEC into components. This makes client-side
2238 # Parse BUNDLESPEC into components. This makes client-side
2234 # preferences easier to specify since you can prefer a single
2239 # preferences easier to specify since you can prefer a single
2235 # component of the BUNDLESPEC.
2240 # component of the BUNDLESPEC.
2236 if key == 'BUNDLESPEC':
2241 if key == 'BUNDLESPEC':
2237 try:
2242 try:
2238 bundlespec = parsebundlespec(repo, value,
2243 bundlespec = parsebundlespec(repo, value,
2239 externalnames=True)
2244 externalnames=True)
2240 attrs['COMPRESSION'] = bundlespec.compression
2245 attrs['COMPRESSION'] = bundlespec.compression
2241 attrs['VERSION'] = bundlespec.version
2246 attrs['VERSION'] = bundlespec.version
2242 except error.InvalidBundleSpecification:
2247 except error.InvalidBundleSpecification:
2243 pass
2248 pass
2244 except error.UnsupportedBundleSpecification:
2249 except error.UnsupportedBundleSpecification:
2245 pass
2250 pass
2246
2251
2247 m.append(attrs)
2252 m.append(attrs)
2248
2253
2249 return m
2254 return m
2250
2255
2251 def isstreamclonespec(bundlespec):
2256 def isstreamclonespec(bundlespec):
2252 # Stream clone v1
2257 # Stream clone v1
2253 if (bundlespec.compression == 'UN' and bundlespec.version == 's1'):
2258 if (bundlespec.compression == 'UN' and bundlespec.version == 's1'):
2254 return True
2259 return True
2255
2260
2256 # Stream clone v2
2261 # Stream clone v2
2257 if (bundlespec.compression == 'UN' and bundlespec.version == '02' and \
2262 if (bundlespec.compression == 'UN' and bundlespec.version == '02' and \
2258 bundlespec.contentopts.get('streamv2')):
2263 bundlespec.contentopts.get('streamv2')):
2259 return True
2264 return True
2260
2265
2261 return False
2266 return False
2262
2267
def filterclonebundleentries(repo, entries, streamclonerequested=False):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    accepted = []
    ui = repo.ui

    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                bundlespec = parsebundlespec(repo, spec, strict=True)

                # If a stream clone was requested, filter out non-streamclone
                # entries.
                if streamclonerequested and not isstreamclonespec(bundlespec):
                    ui.debug('filtering %s because not a stream clone\n' %
                             entry['URL'])
                    continue

            except error.InvalidBundleSpecification as e:
                ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                ui.debug('filtering %s because unsupported bundle '
                         'spec: %s\n' % (
                             entry['URL'], stringutil.forcebytestr(e)))
                continue
        # If we don't have a spec and requested a stream clone, we don't know
        # what the entry is so don't attempt to apply it.
        elif streamclonerequested:
            ui.debug('filtering %s because cannot determine if a stream '
                     'clone bundle\n' % entry['URL'])
            continue

        # Entries requiring SNI are unusable on clients whose TLS stack
        # lacks it.
        if 'REQUIRESNI' in entry and not sslutil.hassni:
            ui.debug('filtering %s because SNI not supported\n' %
                     entry['URL'])
            continue

        accepted.append(entry)

    return accepted
2310
2315
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk the preference list in order; the first attribute that
        # discriminates between the two entries decides the ordering.
        for prefkey, prefvalue in self.prefers:
            ours = self.value.get(prefkey)
            theirs = other.value.get(prefkey)

            # An entry that matches the preference exactly outranks one
            # that lacks the attribute entirely.
            if theirs is None and ours is not None and ours == prefvalue:
                return -1
            if ours is None and theirs is not None and theirs == prefvalue:
                return 1

            # If the attribute is missing on either side, or both sides
            # agree, this preference cannot discriminate; try the next.
            if ours is None or theirs is None or ours == theirs:
                continue

            # Both present and different: the exact match sorts first.
            if ours == prefvalue:
                return -1
            if theirs == prefvalue:
                return 1

        # No preference discriminated. Fall back to index order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __ne__(self, other):
        return self._cmp(other) != 0
2374
2379
def sortclonebundleentries(ui, entries):
    """Sort clone bundle manifest entries by the user's preferences.

    Preferences come from the ``ui.clonebundleprefers`` config as a list of
    ``KEY=value`` strings; without any, the manifest order is kept as-is.
    """
    prefers = ui.configlist('ui', 'clonebundleprefers')
    if not prefers:
        return list(entries)

    # Decompose "KEY=value" strings into (key, value) pairs once up front.
    pairs = [p.split('=', 1) for p in prefers]

    # Wrap each entry so clonebundleentry's rich comparisons drive the sort.
    wrapped = [clonebundleentry(entry, pairs) for entry in entries]
    wrapped.sort()
    return [w.value for w in wrapped]
2384
2389
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL.

    Returns True if a bundle was fetched and applied, False if the fetch
    failed (a warning is emitted in that case).
    """
    # Hold the store lock and wrap the whole application in one transaction
    # so an interrupted or failing pull is rolled back cleanly.
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            fh = urlmod.open(ui, url)
            payload = readbundle(ui, fh, 'stream')

            # Stream clone payloads carry their own applier; anything else
            # is handed to the bundle2 machinery.
            if isinstance(payload, streamclone.streamcloneapplier):
                payload.apply(repo)
            else:
                bundle2.applybundle(repo, payload, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') %
                    stringutil.forcebytestr(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') %
                    stringutil.forcebytestr(e.reason))

    return False
General Comments 0
You need to be logged in to leave comments. Login now