##// END OF EJS Templates
bundlespec: add support for some variants...
Boris Feld -
r37185:a2b350d9 default
parent child Browse files
Show More
@@ -1,2310 +1,2326 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 bin,
16 bin,
17 hex,
17 hex,
18 nullid,
18 nullid,
19 )
19 )
20 from .thirdparty import (
20 from .thirdparty import (
21 attr,
21 attr,
22 )
22 )
23 from . import (
23 from . import (
24 bookmarks as bookmod,
24 bookmarks as bookmod,
25 bundle2,
25 bundle2,
26 changegroup,
26 changegroup,
27 discovery,
27 discovery,
28 error,
28 error,
29 lock as lockmod,
29 lock as lockmod,
30 logexchange,
30 logexchange,
31 obsolete,
31 obsolete,
32 phases,
32 phases,
33 pushkey,
33 pushkey,
34 pycompat,
34 pycompat,
35 scmutil,
35 scmutil,
36 sslutil,
36 sslutil,
37 streamclone,
37 streamclone,
38 url as urlmod,
38 url as urlmod,
39 util,
39 util,
40 )
40 )
41 from .utils import (
41 from .utils import (
42 stringutil,
42 stringutil,
43 )
43 )
44
44
45 urlerr = util.urlerr
45 urlerr = util.urlerr
46 urlreq = util.urlreq
46 urlreq = util.urlreq
47
47
# Maps bundle version human names to changegroup versions.
#
# The values are the two-character changegroup ("cg") version identifiers
# that appear in the bundle stream itself ('s1' is the packed/stream
# format). 'bundle2' is kept as a legacy alias for 'v2'.
_bundlespeccgversions = {'v1': '01',
                         'v2': '02',
                         'packed1': 's1',
                         'bundle2': '02', # legacy alias for 'v2'
                        }
54
54
# Maps bundle version with content opts to choose which part to bundle
#
# Known option keys (see the 'v1'/'v2' entries below):
# - changegroup:     whether to include a changegroup part
# - cg.version:      changegroup wire-format version to emit
# - obsolescence:    whether to include obsolescence markers
# - phases:          whether to include phase information
# - tagsfnodescache: whether to include the tags filenode cache
# - revbranchcache:  whether to include the rev-branch cache
_bundlespeccontentopts = {
    'v1': {
        'changegroup': True,
        'cg.version': '01',
        'obsolescence': False,
        'phases': False,
        'tagsfnodescache': False,
        'revbranchcache': False
    },
    'v2': {
        'changegroup': True,
        'cg.version': '02',
        'obsolescence': False,
        'phases': False,
        'tagsfnodescache': True,
        'revbranchcache': True
    },
    'packed1' : {
        'cg.version': 's1'
    }
}
# 'bundle2' is a legacy alias for 'v2' and shares its content options.
_bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
78
78
# Content-option overrides applied when a variant parameter is present in
# a bundle spec: "stream=v2" selects the "streamv2" entry below, which
# swaps the changegroup part for a stream v2 part and drops the caches.
_bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
                                    "tagsfnodescache": False,
                                    "revbranchcache": False}}
82
# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
# Bundle specs must stay readable by older clients (see the compatibility
# note in parsebundlespec's docstring), so this set is frozen.
_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
81
85
@attr.s
class bundlespec(object):
    """Parsed representation of a bundle specification string.

    Instances are produced by parsebundlespec().
    """
    # compression engine name; the internal bundle type unless the spec
    # was parsed with externalnames=True
    compression = attr.ib()
    # bundle/changegroup version identifier (internal form unless
    # externalnames=True)
    version = attr.ib()
    # dict of URI-decoded key=value parameters from the spec string
    params = attr.ib()
    # dict of content options selecting which parts go into the bundle
    contentopts = attr.ib()
88
92
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    A bundle specification denotes a well-defined bundle/exchange format;
    its meaning must never change over time, so that bundles produced by a
    newer Mercurial remain readable by older versions.

    The accepted form is:

        <compression>-<type>[;<parameter0>[;<parameter1>]]

    where <compression> is one of the supported compression formats and
    <type> is (currently) a version string. Everything after a ";" is
    parsed as URI-encoded, ";"-delimited key=value pairs.

    If ``strict`` is True (the default) <compression> is required.
    Otherwise, it is optional.

    If ``externalnames`` is False (the default), the human-centric names
    will be converted to their internal representation.

    Returns a bundlespec object of (compression, version, parameters).
    Compression will be ``None`` if not in strict mode and a compression
    isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression
    or bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def _splitparams(text):
        # "version;k=v;k2=v2" -> (version, {k: v, k2: v2}), URI-decoding
        # both keys and values
        if ';' not in text:
            return text, {}
        base, rest = text.split(';', 1)
        kwargs = {}
        for item in rest.split(';'):
            if '=' not in item:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % item)
            name, val = item.split('=', 1)
            kwargs[urlreq.unquote(name)] = urlreq.unquote(val)
        return base, kwargs

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully qualified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)
        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)
        version, params = _splitparams(version)
        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in
        # which case some defaults are assumed (but only when not in
        # strict mode).
        assert not strict

        spec, params = _splitparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            # Generaldelta repos and modern compression engines both
            # require v2.
            if ('generaldelta' in repo.requirements
                or compression not in _bundlespecv1compengines):
                version = 'v2'
            else:
                version = 'v1'
        elif spec in _bundlespeccgversions:
            compression = 'none' if spec == 'packed1' else 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what
    # the repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        missingreqs = (set(params['requirements'].split(','))
                       - repo.supportedformats)
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    # Compute contentopts based on the version, then layer any variant
    # overrides (currently only "stream=v2") on top.
    contentopts = _bundlespeccontentopts.get(version, {}).copy()
    if params.get("stream") == "v2":
        contentopts.update(_bundlespecvariants["streamv2"])

    if not externalnames:
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]

    return bundlespec(compression, version, params, contentopts)
215
224
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the magic of a bundle file handle and return an unbundler.

    Reads the first four bytes of ``fh`` and dispatches to the matching
    unbundler: cg1unpacker for HG10, bundle2's getunbundler for HG2x, and
    streamcloneapplier for HGS1. Aborts on anything else.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if fname:
        if vfs:
            fname = vfs.join(fname)
    else:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            # Headerless changegroup: push the sniffed bytes back and
            # treat it as an uncompressed HG10 stream.
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)

    if version == '10':
        if alg is None:
            # compression marker follows the magic for real HG10 bundles
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
243
252
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.

    Returns a bundlespec string (e.g. 'gzip-v1', 'none-v2;stream=v2;...',
    'none-packed1;...'). Raises error.Abort when the compression engine,
    changegroup version, or bundle type cannot be mapped back to a spec.
    """
    def speccompression(alg):
        # Map an internal bundle compression type back to its spec name,
        # or None if the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # internal marker for a BZ stream whose header was consumed
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            alg = b.params['Compression']
            comp = speccompression(alg)
            if not comp:
                # Report the unrecognized algorithm itself. (The previous
                # code interpolated ``comp``, which is always None here,
                # producing 'unknown compression algorithm: None'.)
                raise error.Abort(_('unknown compression algorithm: %s') % alg)
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))
            elif part.type == 'stream2' and version is None:
                # A stream2 part requires to be part of a v2 bundle
                version = "v2"
                requirements = urlreq.unquote(part.params['requirements'])
                splitted = requirements.split()
                params = bundle2._formatrequirementsparams(splitted)
                return 'none-v2;stream=v2;%s' % params

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        formatted = bundle2._formatrequirementsparams(requirements)
        return 'none-packed1;%s' % formatted
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
296
312
def _computeoutgoing(repo, heads, common):
    """Build a discovery.outgoing object from heads/common node sets.

    Nodes in ``common`` that are unknown locally are dropped; an empty
    ``common`` falls back to the null node, and an empty ``heads`` falls
    back to every head of the changelog.

    This is a separate function so extensions can have access to the
    logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if common:
        common = [node for node in common if cl.hasnode(node)]
    else:
        common = [nullid]
    return discovery.outgoing(repo, common, heads or cl.heads())
315
331
316 def _forcebundle1(op):
332 def _forcebundle1(op):
317 """return true if a pull/push must use bundle1
333 """return true if a pull/push must use bundle1
318
334
319 This function is used to allow testing of the older bundle version"""
335 This function is used to allow testing of the older bundle version"""
320 ui = op.repo.ui
336 ui = op.repo.ui
321 # The goal is this config is to allow developer to choose the bundle
337 # The goal is this config is to allow developer to choose the bundle
322 # version used during exchanged. This is especially handy during test.
338 # version used during exchanged. This is especially handy during test.
323 # Value is a list of bundle version to be picked from, highest version
339 # Value is a list of bundle version to be picked from, highest version
324 # should be used.
340 # should be used.
325 #
341 #
326 # developer config: devel.legacy.exchange
342 # developer config: devel.legacy.exchange
327 exchange = ui.configlist('devel', 'legacy.exchange')
343 exchange = ui.configlist('devel', 'legacy.exchange')
328 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
344 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
329 return forcebundle1 or not op.remote.capable('bundle2')
345 return forcebundle1 or not op.remote.capable('bundle2')
330
346
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        """Initialize push state; most fields are filled in as the push
        proceeds rather than here."""
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # steps already performed
        # (used to check what steps have been already performed through
        # bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing
        #             pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the
        #                   push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changesets filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of (success, failure) messages used when pushing a bookmark
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
457
473
458
474
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()

    Note: the object actually returned is the pushoperation; the integer
    described above travels on its ``cgresult`` attribute.
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    # a local destination must support every feature this repo requires
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    # run the push proper under whichever locks/transaction we managed to
    # acquire above; nullcontextmanager stands in for any we did not get
    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        # legacy (bundle1) steps — presumably no-ops for data already sent
        # over bundle2 via pushop.stepsdone; confirm in their definitions
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
523
539
# Ordered list of discovery step names run before a push.
pushdiscoveryorder = []

# Step name -> implementing function.
#
# Kept public so extensions can wrap individual steps.
pushdiscoverymapping = {}

def pushdiscovery(stepname):
    """decorator registering a new pre-push discovery step

    The decorated function is recorded in the name -> function mapping
    and its name appended to the ordered step list, so registration
    order is execution order.

    Only use this decorator for brand new steps; to wrap an existing
    step from an extension, mutate the pushdiscoverymapping dictionary
    directly."""
    def register(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register

def _pushdiscovery(pushop):
    """Run all discovery steps"""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
553
569
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """figure out which changesets have to travel to the remote

    Fills in ``pushop.outgoing``, ``pushop.remoteheads`` and
    ``pushop.incoming`` from the common/incoming discovery results.
    """
    kwargs = {'force': pushop.force}
    if pushop.revs:
        # restrict the discovery to ancestors of the requested revs
        kwargs['ancestorsof'] = pushop.revs
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             **kwargs)
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(pushop.repo, pushop.remote,
                                                   onlyheads=pushop.revs,
                                                   commoninc=commoninc,
                                                   force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
570
586
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    # summarize the remote phase situation as seen from our fallback heads
    pushop.remotephases = phases.remotephasessummary(pushop.repo,
                                                     pushop.fallbackheads,
                                                     remotephases)
    droots = pushop.remotephases.draftroots

    extracond = ''
    if not pushop.remotephases.publishing:
        # non-publishing server: restrict to changesets already public
        # locally
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    # phases to update if the changeset push succeeds...
    pushop.outdatedphases = future
    # ...and the fallback set if it does not (common changesets only)
    pushop.fallbackoutdatedphases = fallback
622
638
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect the obsolescence markers relevant to the pushed set

    Does nothing unless marker exchange is enabled, the local obsstore is
    non-empty and the remote advertises the 'obsolete' namespace.
    """
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
633
649
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and decide which moves to push"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # limit bookmark handling to ancestors of the pushed revisions
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks explicitly requested for this push, shortcuts expanded
    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # None marks a bookmark absent on one side; pass it through
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        # turn binary node ids into hex, keeping absent (None) entries
        return [(b, safehex(scid), safehex(dcid))
                for (b, scid, dcid) in bookmarks]

    comp = [hexifycompbookmarks(marks) for marks in comp]
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
663
679
def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """take decision on bookmark to pull from the remote bookmark

    ``comp`` is the 8-tuple produced by bookmark comparison.  Exist to
    help extensions who want to alter this behavior.
    """
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo
    out = pushop.outbookmarks

    # fast-forwardable on the remote: send when the target rev is pushed
    for name, scid, dcid in advsrc:
        explicit.discard(name)
        if not pushed or repo[scid].rev() in pushed:
            out.append((name, dcid, scid))
    # search added bookmark
    for name, scid, dcid in addsrc:
        explicit.discard(name)
        out.append((name, '', scid))
    # search for overwritten bookmark
    for name, scid, dcid in list(advdst) + list(diverge) + list(differ):
        explicit.discard(name)
        out.append((name, dcid, scid))
    # search for bookmark to delete
    for name, scid, dcid in adddst:
        explicit.discard(name)
        # treat as "deleted locally"
        out.append((name, dcid, ''))
    # identical bookmarks shouldn't get reported
    for name, scid, dcid in same:
        explicit.discard(name)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(_('bookmark %s does not exist on the local '
                         'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    out.sort()
707
723
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before actually pushing them

    Returns False when there is nothing to push.  Without --force, aborts
    when the outgoing set contains obsolete or unstable changesets, and
    runs the head checks via discovery.checkheads.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

        discovery.checkheads(pushop)
    return True
742
758
# Names of the bundle2 part generating steps for an outgoing push;
# order matters.
b2partsgenorder = []

# Step name -> part generating function.
#
# Kept public so extensions can wrap individual steps.
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator registering a bundle2 part generation step

    The decorated function goes into the name -> function mapping, and
    the step name is appended to the ordered list (or inserted at
    position ``idx`` when given), so registration order is generation
    order.

    Only use this decorator for brand new steps; to wrap an existing
    step from an extension, alter the b2partsgenmapping dictionary
    directly."""
    def register(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return register
769
785
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # coarse check: the remote's full head set must still match
            # what we observed during discovery
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # finer-grained check: only the remote heads we are about to
            # discard or supersede need to still be present
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
793
809
def _pushing(pushop):
    """True when the operation carries any payload at all

    Checks outgoing changesets, phase updates, obsolescence markers and
    bookmark moves.
    """
    payloads = (pushop.outgoing.missing,
                pushop.outdatedphases,
                pushop.outobsmarkers,
                pushop.outbookmarks)
    return any(bool(p) for p in payloads)
800
816
@b2partsgenerator('check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """add a part checking that remote bookmarks have not moved

    The part carries the (name, expected old node) pairs for the
    server-side verification.
    """
    if not _pushing(pushop) or pushop.force:
        return
    remotecaps = bundle2.bundle2caps(pushop.remote)
    if not pushop.outbookmarks or 'bookmarks' not in remotecaps:
        return
    expected = [(book, bin(old)) for book, old, new in pushop.outbookmarks]
    bundler.newpart('check:bookmarks', data=bookmod.binaryencode(expected))
816
832
@b2partsgenerator('check-phases')
def _pushb2checkphases(pushop, bundler):
    """add a part asserting the remote phases are still as we saw them

    Skipped on forced pushes, when nothing is pushed, or when the remote
    lacks the binary 'phases' heads capability.
    """
    if not _pushing(pushop) or pushop.force:
        return
    caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = 'heads' in caps.get('phases', ())
    if pushop.remotephases is None or not hasphaseheads:
        return
    # check that the remote phase has not changed
    checks = [[] for p in phases.allphases]
    checks[phases.public].extend(pushop.remotephases.publicheads)
    checks[phases.draft].extend(pushop.remotephases.draftroots)
    if any(checks):
        for nodes in checks:
            nodes.sort()
        bundler.newpart('check:phases', data=phases.binaryencode(checks))
834
850
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        # highest changegroup format understood by both sides
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
874
890
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """emit phase synchronisation over bundle2

    Prefers the binary 'phase-heads' part; falls back to pushkey when
    the remote lacks it or the 'devel.legacy.exchange' knob requests the
    legacy exchange.
    """
    if 'phases' in pushop.stepsdone:
        return
    caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    forcelegacy = 'phases' in ui.configlist('devel', 'legacy.exchange')
    supportspushkey = 'pushkey' in caps
    supportsheads = 'heads' in caps.get('phases', ())

    if supportsheads and not forcelegacy:
        return _pushb2phaseheads(pushop, bundler)
    if supportspushkey:
        return _pushb2phasespushkey(pushop, bundler)
891
907
def _pushb2phaseheads(pushop, bundler):
    """encode phase moves as a binary 'phase-heads' bundle2 part"""
    pushop.stepsdone.add('phases')
    if not pushop.outdatedphases:
        return
    # one bucket per known phase; the heads to publish go into bucket 0
    buckets = [[] for _phase in phases.allphases]
    buckets[0].extend(ctx.node() for ctx in pushop.outdatedphases)
    bundler.newpart('phase-heads', data=phases.binaryencode(buckets))
900
916
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part"""
    pushop.stepsdone.add('phases')
    # (part id, node) pairs, to map replies/failures back to their node
    part2node = []

    def handlefailure(pushop, exc):
        # translate a failing part id into an abort naming the node
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    # one pushkey part per head being moved from draft to public
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # inspect each pushkey reply; warn (do not abort) when the server
        # ignored or refused an update
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
935
951
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add outgoing obsolescence markers to the bundle when possible"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    # only emit the part if we share a markers format with the remote
    supportedversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(supportedversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
947
963
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Prefers the dedicated binary 'bookmarks' part when the remote advertises
    it (and legacy exchange is not forced), falling back to pushkey parts.
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    remotecaps = bundle2.bundle2caps(pushop.remote)

    # 'devel.legacy.exchange' lets tests/users force the old pushkey path
    forcelegacy = 'bookmarks' in pushop.repo.ui.configlist('devel',
                                                           'legacy.exchange')

    if 'bookmarks' in remotecaps and not forcelegacy:
        return _pushb2bookmarkspart(pushop, bundler)
    if 'pushkey' in remotecaps:
        return _pushb2bookmarkspushkey(pushop, bundler)
962
978
963 def _bmaction(old, new):
979 def _bmaction(old, new):
964 """small utility for bookmark pushing"""
980 """small utility for bookmark pushing"""
965 if not old:
981 if not old:
966 return 'export'
982 return 'export'
967 elif not new:
983 elif not new:
968 return 'delete'
984 return 'delete'
969 return 'update'
985 return 'update'
970
986
def _pushb2bookmarkspart(pushop, bundler):
    """push bookmarks through a single binary 'bookmarks' bundle2 part

    Returns a reply handler that reports each bookmark move on success.
    """
    pushop.stepsdone.add('bookmarks')
    if not pushop.outbookmarks:
        return

    actions = []
    entries = []
    for book, old, new in pushop.outbookmarks:
        new = bin(new)
        entries.append((book, new))
        actions.append((book, _bmaction(old, new)))
    bundler.newpart('bookmarks', data=bookmod.binaryencode(entries))

    def handlereply(op):
        ui = pushop.ui
        # reaching this point means the whole part was accepted
        for book, action in actions:
            ui.status(bookmsgmap[action][0] % book)

    return handlereply
992
1008
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmarks through bundle2 'pushkey' parts (legacy wire path)

    One part is emitted per outgoing bookmark.  Returns a reply handler
    reporting per-bookmark success or failure.
    """
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for a part we did not generate
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # reuse the shared helper so action names stay consistent with the
        # binary bookmarks part code path (_pushb2bookmarkspart)
        action = _bmaction(old, new)
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
1037
1053
@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2

    Each --pushvars argument must look like KEY=VALUE (or KEY=); they are
    forwarded to the server as advisory parameters of a 'pushvars' part.
    '''
    if not pushop.pushvars:
        return

    shellvars = {}
    for raw in pushop.pushvars:
        if '=' not in raw:
            msg = ("unable to parse variable '%s', should follow "
                   "'KEY=VALUE' or 'KEY=' format")
            raise error.Abort(msg % raw)
        key, value = raw.split('=', 1)
        shellvars[key] = value

    part = bundler.newpart('pushvars')

    # advisory: an older server simply ignores the part
    for key, value in shellvars.iteritems():
        part.addparam(key, value, mandatory=False)
1056
1072
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # advertise our reply capabilities so the server can answer in kind
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback,
                                                      role='client'))
    bundler.newpart('replycaps', data=capsblob)

    # let every registered part generator contribute; generators returning a
    # callable want to post-process the server reply
    replyhandlers = []
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        handler = partgen(pushop, bundler)
        if callable(handler):
            replyhandlers.append(handler)

    # only 'replycaps' was added: nothing to push
    if bundler.nbparts <= 1:
        return

    payload = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                payload, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # the server aborted mid-bundle; relay its message and hint
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        # dispatch to the per-part failure callback when one was registered
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
1106
1122
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None

    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # pushing everything: take the fast path, no race possible on push
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    remoteheads = ['force'] if pushop.force else pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
1146
1162
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        pheads, droots = phases.analyzeremotephases(pushop.repo, cheads,
                                                    remotephases)
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      ('%d' % phases.draft),
                                      ('%d' % phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1202
1218
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # repo is not locked (no transaction manager): do not change any phase.
    # Just inform the user when a move would have been applicable.
    wouldmove = any(phase < pushop.repo[n].phase() for n in nodes)
    if wouldmove:
        phasestr = phases.phasenames[phase]
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1219
1235
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    keyeddata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    results = []
    # reverse sort to ensure we end with dump0
    for key in sorted(keyeddata, reverse=True):
        results.append(remote.pushkey('obsolete', key, '', keyeddata[key]))
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
1238
1254
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Legacy (non-bundle2) path: one pushkey call per outgoing bookmark,
    reporting success or failure for each.
    """
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # reuse the shared helper so action names stay consistent with the
        # bundle2 code paths
        action = _bmaction(old, new)
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
    # discovery can have set the value from invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
1260
1276
class pulloperation(object):
    """An object representing a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmarks pulled explicitly, with aliases expanded
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # set of steps already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        if self.heads is not None:
            # a specific subset was pulled: sync on exactly that subset
            return self.heads
        # everything possible was pulled: sync on everything common, plus
        # any remote head not already known to be common
        known = set(self.common)
        subset = list(self.common)
        subset.extend(n for n in self.rheads if n not in known)
        return subset

    @util.propertycache
    def canusebundle2(self):
        # bundle2 unless explicitly forced back to bundle1
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1331
1347
class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # lazily created; stays None until transaction() is first called
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
1361
1377
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           **pycompat.strkwargs(opargs))

    # refuse to pull from a repo whose requirements we do not support
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
    with repo.wlock(), repo.lock(), pullop.trmanager:
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        streamclone.maybeperformlegacystreamclone(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # each step below is a no-op when already handled via bundle2
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)

        # storing remotenames
        if repo.ui.configbool('experimental', 'remotenames'):
            logexchange.pullremotenames(repo, remote)

    return pullop
1418
1434
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary.
# Both structures are populated by the @pulldiscovery decorator below;
# the order list preserves registration order, which may matter.
pulldiscoverymapping = {}
1426
1442
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The decorated function is stored in the step -> function mapping and its
    step name appended to the ordered step list. Decoration order therefore
    determines execution order (this may matter).

    Only use this decorator for a brand new step; to wrap a step defined by
    another extension, modify the pulldiscovery dictionary directly."""
    def register(func):
        # refuse to silently overwrite an already-registered step
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1442
1458
def _pulldiscovery(pullop):
    """Run every registered discovery step, in registration order."""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1448
1464
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # bookmark data already known, nothing to fetch
        return
    server_has_listkeys = (pullop.canusebundle2
                           and 'listkeys' in pullop.remotebundle2caps)
    if server_has_listkeys:
        # all known bundle2 servers now support listkeys, but lets be nice
        # with new implementation.
        return
    raw = pullop.remote.listkeys('bookmarks')
    pullop.remotebookmarks = bookmod.unhexlifybookmarks(raw)
1463
1479
1464
1480
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will change to handle all
    discovery at some point.

    Populates ``pullop.common``, ``pullop.fetch`` and ``pullop.rheads``."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    # nodemap of the *unfiltered* repo: lets us recognize remote heads that
    # exist locally but are hidden by the current filter
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote heads is filtered locally, put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
        if set(rheads).issubset(set(common)):
            # every remote head is already common: nothing left to fetch
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1497
1513
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup.

    Negotiates which bundle2 parts to request (changegroup, phases,
    bookmarks, obsmarkers, stream), fetches the bundle from the remote and
    applies it, then post-processes the recorded listkeys/bookmarks replies.
    """
    kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads

    if streaming:
        # stream clone replaces both the changegroup and phase steps
        kwargs['cg'] = False
        kwargs['stream'] = True
        pullop.stepsdone.add('changegroup')
        pullop.stepsdone.add('phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add('changegroup')

        kwargs['cg'] = pullop.fetch

        # prefer the binary phase-heads part unless the devel knob forces
        # the legacy pushkey-based exchange
        legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
        hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
        if (not legacyphase and hasbinaryphase):
            kwargs['phases'] = True
            pullop.stepsdone.add('phases')

        if 'listkeys' in pullop.remotebundle2caps:
            if 'phases' not in pullop.stepsdone:
                # fall back to listkeys for phase data
                kwargs['listkeys'] = ['phases']

    bookmarksrequested = False
    legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
    hasbinarybook = 'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        pullop.stepsdone.add('request-bookmarks')

    if ('request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark and hasbinarybook):
        kwargs['bookmarks'] = True
        bookmarksrequested = True

    if 'listkeys' in pullop.remotebundle2caps:
        if 'request-bookmarks' not in pullop.stepsdone:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add('request-bookmarks')
            kwargs.setdefault('listkeys', []).append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    # let extensions tweak the getbundle arguments before the request
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
    try:
        op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction)
        # record bookmark replies instead of applying them immediately
        op.modes['bookmarks'] = 'records'
        bundle2.processbundle(pullop.repo, bundle, op=op)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        books = {}
        for record in op.records['bookmarks']:
            books[record['bookmark']] = record["node"]
        pullop.remotebookmarks = books
    else:
        for namespace, value in op.records['listkeys']:
            if namespace == 'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1613
1629
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call

    Deliberately a no-op here; extensions wrap it to mutate ``kwargs``
    before the bundle2 getbundle request is issued."""
1616
1632
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Legacy (non-bundle2) changegroup pull. Picks the richest wire-protocol
    command the remote supports: getbundle, then changegroup (full pull),
    then changegroupsubset (partial pull)."""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        # full pull over the legacy changegroup command
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1651
1667
def _pullphase(pullop):
    """Fetch phase data from the remote via listkeys and apply it locally.

    Skipped entirely when a previous step (e.g. bundle2) already handled
    phases."""
    if 'phases' not in pullop.stepsdone:
        _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1658
1674
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state

    ``remotephases`` is the (possibly empty) listkeys-style phase mapping
    reported by the remote. A publishing (or pre-phase) remote makes every
    pulled changeset public; otherwise the remote's draft roots are
    analyzed to decide which heads advance to public vs draft."""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    # bind hot lookups once before the filtering passes below
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1693
1709
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        # bookmarks were already handled (e.g. by the bundle2 path)
        return
    pullop.stepsdone.add('bookmarks')
    localrepo = pullop.repo
    bookmod.updatefromremote(localrepo.ui,
                             localrepo,
                             pullop.remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1705
1721
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is a function that returns the pull transaction,
    creating one if necessary. We return the transaction to inform the
    calling code that a new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes."""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        # 'dump0' is the first chunk of the pushkey-encoded marker data;
        # its presence means the remote actually has markers to send
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            # decode every dumpN chunk (base85-encoded obsmarker stream)
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr
1733
1749
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    # advertise bundle2 support plus our URL-quoted capability blob
    blob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    return {'HG20', 'bundle2=' + urlreq.quote(blob)}
1740
1756
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary.
# Both structures are populated by @getbundle2partsgenerator below.
getbundle2partsmapping = {}
1748
1764
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is stored in the step -> function mapping and its
    step name added to the ordered step list — appended by default, or
    inserted at position ``idx`` when given. Decoration order therefore
    matters.

    Only use this decorator for a brand new step; to wrap a step defined by
    another extension, attach to the getbundle2partsmapping dictionary
    directly."""
    def register(func):
        # refuse to silently overwrite an already-registered step
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return register
1767
1783
def bundle2requested(bundlecaps):
    """Tell whether the client capabilities ask for a bundle2 stream.

    Returns True when any advertised capability starts with 'HG2'
    (e.g. 'HG20'); a ``None`` capability set means no bundle2."""
    if bundlecaps is None:
        return False
    for cap in bundlecaps:
        if cap.startswith('HG2'):
            return True
    return False
1772
1788
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        # bundle10 can only carry a changegroup, nothing else
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        info['bundleversion'] = 1
        return info, changegroup.makestream(repo, outgoing, '01', source,
                                            bundlecaps=bundlecaps)

    # bundle20 case
    info['bundleversion'] = 2
    # decode the client's URL-quoted bundle2 capability blob(s)
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # let every registered part generator add its part to the bundle
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    info['prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()
1819
1835
1820 @getbundle2partsgenerator('stream2')
1836 @getbundle2partsgenerator('stream2')
1821 def _getbundlestream2(bundler, repo, *args, **kwargs):
1837 def _getbundlestream2(bundler, repo, *args, **kwargs):
1822 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
1838 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
1823
1839
1824 @getbundle2partsgenerator('changegroup')
1840 @getbundle2partsgenerator('changegroup')
1825 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1841 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1826 b2caps=None, heads=None, common=None, **kwargs):
1842 b2caps=None, heads=None, common=None, **kwargs):
1827 """add a changegroup part to the requested bundle"""
1843 """add a changegroup part to the requested bundle"""
1828 cgstream = None
1844 cgstream = None
1829 if kwargs.get(r'cg', True):
1845 if kwargs.get(r'cg', True):
1830 # build changegroup bundle here.
1846 # build changegroup bundle here.
1831 version = '01'
1847 version = '01'
1832 cgversions = b2caps.get('changegroup')
1848 cgversions = b2caps.get('changegroup')
1833 if cgversions: # 3.1 and 3.2 ship with an empty value
1849 if cgversions: # 3.1 and 3.2 ship with an empty value
1834 cgversions = [v for v in cgversions
1850 cgversions = [v for v in cgversions
1835 if v in changegroup.supportedoutgoingversions(repo)]
1851 if v in changegroup.supportedoutgoingversions(repo)]
1836 if not cgversions:
1852 if not cgversions:
1837 raise ValueError(_('no common changegroup version'))
1853 raise ValueError(_('no common changegroup version'))
1838 version = max(cgversions)
1854 version = max(cgversions)
1839 outgoing = _computeoutgoing(repo, heads, common)
1855 outgoing = _computeoutgoing(repo, heads, common)
1840 if outgoing.missing:
1856 if outgoing.missing:
1841 cgstream = changegroup.makestream(repo, outgoing, version, source,
1857 cgstream = changegroup.makestream(repo, outgoing, version, source,
1842 bundlecaps=bundlecaps)
1858 bundlecaps=bundlecaps)
1843
1859
1844 if cgstream:
1860 if cgstream:
1845 part = bundler.newpart('changegroup', data=cgstream)
1861 part = bundler.newpart('changegroup', data=cgstream)
1846 if cgversions:
1862 if cgversions:
1847 part.addparam('version', version)
1863 part.addparam('version', version)
1848 part.addparam('nbchanges', '%d' % len(outgoing.missing),
1864 part.addparam('nbchanges', '%d' % len(outgoing.missing),
1849 mandatory=False)
1865 mandatory=False)
1850 if 'treemanifest' in repo.requirements:
1866 if 'treemanifest' in repo.requirements:
1851 part.addparam('treemanifest', '1')
1867 part.addparam('treemanifest', '1')
1852
1868
1853 @getbundle2partsgenerator('bookmarks')
1869 @getbundle2partsgenerator('bookmarks')
1854 def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
1870 def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
1855 b2caps=None, **kwargs):
1871 b2caps=None, **kwargs):
1856 """add a bookmark part to the requested bundle"""
1872 """add a bookmark part to the requested bundle"""
1857 if not kwargs.get(r'bookmarks', False):
1873 if not kwargs.get(r'bookmarks', False):
1858 return
1874 return
1859 if 'bookmarks' not in b2caps:
1875 if 'bookmarks' not in b2caps:
1860 raise ValueError(_('no common bookmarks exchange method'))
1876 raise ValueError(_('no common bookmarks exchange method'))
1861 books = bookmod.listbinbookmarks(repo)
1877 books = bookmod.listbinbookmarks(repo)
1862 data = bookmod.binaryencode(books)
1878 data = bookmod.binaryencode(books)
1863 if data:
1879 if data:
1864 bundler.newpart('bookmarks', data=data)
1880 bundler.newpart('bookmarks', data=data)
1865
1881
1866 @getbundle2partsgenerator('listkeys')
1882 @getbundle2partsgenerator('listkeys')
1867 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1883 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1868 b2caps=None, **kwargs):
1884 b2caps=None, **kwargs):
1869 """add parts containing listkeys namespaces to the requested bundle"""
1885 """add parts containing listkeys namespaces to the requested bundle"""
1870 listkeys = kwargs.get(r'listkeys', ())
1886 listkeys = kwargs.get(r'listkeys', ())
1871 for namespace in listkeys:
1887 for namespace in listkeys:
1872 part = bundler.newpart('listkeys')
1888 part = bundler.newpart('listkeys')
1873 part.addparam('namespace', namespace)
1889 part.addparam('namespace', namespace)
1874 keys = repo.listkeys(namespace).items()
1890 keys = repo.listkeys(namespace).items()
1875 part.data = pushkey.encodekeys(keys)
1891 part.data = pushkey.encodekeys(keys)
1876
1892
1877 @getbundle2partsgenerator('obsmarkers')
1893 @getbundle2partsgenerator('obsmarkers')
1878 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1894 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1879 b2caps=None, heads=None, **kwargs):
1895 b2caps=None, heads=None, **kwargs):
1880 """add an obsolescence markers part to the requested bundle"""
1896 """add an obsolescence markers part to the requested bundle"""
1881 if kwargs.get(r'obsmarkers', False):
1897 if kwargs.get(r'obsmarkers', False):
1882 if heads is None:
1898 if heads is None:
1883 heads = repo.heads()
1899 heads = repo.heads()
1884 subset = [c.node() for c in repo.set('::%ln', heads)]
1900 subset = [c.node() for c in repo.set('::%ln', heads)]
1885 markers = repo.obsstore.relevantmarkers(subset)
1901 markers = repo.obsstore.relevantmarkers(subset)
1886 markers = sorted(markers)
1902 markers = sorted(markers)
1887 bundle2.buildobsmarkerspart(bundler, markers)
1903 bundle2.buildobsmarkerspart(bundler, markers)
1888
1904
1889 @getbundle2partsgenerator('phases')
1905 @getbundle2partsgenerator('phases')
1890 def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
1906 def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
1891 b2caps=None, heads=None, **kwargs):
1907 b2caps=None, heads=None, **kwargs):
1892 """add phase heads part to the requested bundle"""
1908 """add phase heads part to the requested bundle"""
1893 if kwargs.get(r'phases', False):
1909 if kwargs.get(r'phases', False):
1894 if not 'heads' in b2caps.get('phases'):
1910 if not 'heads' in b2caps.get('phases'):
1895 raise ValueError(_('no common phases exchange method'))
1911 raise ValueError(_('no common phases exchange method'))
1896 if heads is None:
1912 if heads is None:
1897 heads = repo.heads()
1913 heads = repo.heads()
1898
1914
1899 headsbyphase = collections.defaultdict(set)
1915 headsbyphase = collections.defaultdict(set)
1900 if repo.publishing():
1916 if repo.publishing():
1901 headsbyphase[phases.public] = heads
1917 headsbyphase[phases.public] = heads
1902 else:
1918 else:
1903 # find the appropriate heads to move
1919 # find the appropriate heads to move
1904
1920
1905 phase = repo._phasecache.phase
1921 phase = repo._phasecache.phase
1906 node = repo.changelog.node
1922 node = repo.changelog.node
1907 rev = repo.changelog.rev
1923 rev = repo.changelog.rev
1908 for h in heads:
1924 for h in heads:
1909 headsbyphase[phase(repo, rev(h))].add(h)
1925 headsbyphase[phase(repo, rev(h))].add(h)
1910 seenphases = list(headsbyphase.keys())
1926 seenphases = list(headsbyphase.keys())
1911
1927
1912 # We do not handle anything but public and draft phase for now)
1928 # We do not handle anything but public and draft phase for now)
1913 if seenphases:
1929 if seenphases:
1914 assert max(seenphases) <= phases.draft
1930 assert max(seenphases) <= phases.draft
1915
1931
1916 # if client is pulling non-public changesets, we need to find
1932 # if client is pulling non-public changesets, we need to find
1917 # intermediate public heads.
1933 # intermediate public heads.
1918 draftheads = headsbyphase.get(phases.draft, set())
1934 draftheads = headsbyphase.get(phases.draft, set())
1919 if draftheads:
1935 if draftheads:
1920 publicheads = headsbyphase.get(phases.public, set())
1936 publicheads = headsbyphase.get(phases.public, set())
1921
1937
1922 revset = 'heads(only(%ln, %ln) and public())'
1938 revset = 'heads(only(%ln, %ln) and public())'
1923 extraheads = repo.revs(revset, draftheads, publicheads)
1939 extraheads = repo.revs(revset, draftheads, publicheads)
1924 for r in extraheads:
1940 for r in extraheads:
1925 headsbyphase[phases.public].add(node(r))
1941 headsbyphase[phases.public].add(node(r))
1926
1942
1927 # transform data in a format used by the encoding function
1943 # transform data in a format used by the encoding function
1928 phasemapping = []
1944 phasemapping = []
1929 for phase in phases.allphases:
1945 for phase in phases.allphases:
1930 phasemapping.append(sorted(headsbyphase[phase]))
1946 phasemapping.append(sorted(headsbyphase[phase]))
1931
1947
1932 # generate the actual part
1948 # generate the actual part
1933 phasedata = phases.binaryencode(phasemapping)
1949 phasedata = phases.binaryencode(phasemapping)
1934 bundler.newpart('phase-heads', data=phasedata)
1950 bundler.newpart('phase-heads', data=phasedata)
1935
1951
1936 @getbundle2partsgenerator('hgtagsfnodes')
1952 @getbundle2partsgenerator('hgtagsfnodes')
1937 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1953 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
1938 b2caps=None, heads=None, common=None,
1954 b2caps=None, heads=None, common=None,
1939 **kwargs):
1955 **kwargs):
1940 """Transfer the .hgtags filenodes mapping.
1956 """Transfer the .hgtags filenodes mapping.
1941
1957
1942 Only values for heads in this bundle will be transferred.
1958 Only values for heads in this bundle will be transferred.
1943
1959
1944 The part data consists of pairs of 20 byte changeset node and .hgtags
1960 The part data consists of pairs of 20 byte changeset node and .hgtags
1945 filenodes raw values.
1961 filenodes raw values.
1946 """
1962 """
1947 # Don't send unless:
1963 # Don't send unless:
1948 # - changeset are being exchanged,
1964 # - changeset are being exchanged,
1949 # - the client supports it.
1965 # - the client supports it.
1950 if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
1966 if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
1951 return
1967 return
1952
1968
1953 outgoing = _computeoutgoing(repo, heads, common)
1969 outgoing = _computeoutgoing(repo, heads, common)
1954 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1970 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1955
1971
1956 @getbundle2partsgenerator('cache:rev-branch-cache')
1972 @getbundle2partsgenerator('cache:rev-branch-cache')
1957 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
1973 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
1958 b2caps=None, heads=None, common=None,
1974 b2caps=None, heads=None, common=None,
1959 **kwargs):
1975 **kwargs):
1960 """Transfer the rev-branch-cache mapping
1976 """Transfer the rev-branch-cache mapping
1961
1977
1962 The payload is a series of data related to each branch
1978 The payload is a series of data related to each branch
1963
1979
1964 1) branch name length
1980 1) branch name length
1965 2) number of open heads
1981 2) number of open heads
1966 3) number of closed heads
1982 3) number of closed heads
1967 4) open heads nodes
1983 4) open heads nodes
1968 5) closed heads nodes
1984 5) closed heads nodes
1969 """
1985 """
1970 # Don't send unless:
1986 # Don't send unless:
1971 # - changeset are being exchanged,
1987 # - changeset are being exchanged,
1972 # - the client supports it.
1988 # - the client supports it.
1973 if not (kwargs.get(r'cg', True)) or 'rev-branch-cache' not in b2caps:
1989 if not (kwargs.get(r'cg', True)) or 'rev-branch-cache' not in b2caps:
1974 return
1990 return
1975 outgoing = _computeoutgoing(repo, heads, common)
1991 outgoing = _computeoutgoing(repo, heads, common)
1976 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
1992 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
1977
1993
1978 def check_heads(repo, their_heads, context):
1994 def check_heads(repo, their_heads, context):
1979 """check if the heads of a repo have been modified
1995 """check if the heads of a repo have been modified
1980
1996
1981 Used by peer for unbundling.
1997 Used by peer for unbundling.
1982 """
1998 """
1983 heads = repo.heads()
1999 heads = repo.heads()
1984 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
2000 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1985 if not (their_heads == ['force'] or their_heads == heads or
2001 if not (their_heads == ['force'] or their_heads == heads or
1986 their_heads == ['hashed', heads_hash]):
2002 their_heads == ['hashed', heads_hash]):
1987 # someone else committed/pushed/unbundled while we
2003 # someone else committed/pushed/unbundled while we
1988 # were transferring data
2004 # were transferring data
1989 raise error.PushRaced('repository changed while %s - '
2005 raise error.PushRaced('repository changed while %s - '
1990 'please try again' % context)
2006 'please try again' % context)
1991
2007
1992 def unbundle(repo, cg, heads, source, url):
2008 def unbundle(repo, cg, heads, source, url):
1993 """Apply a bundle to a repo.
2009 """Apply a bundle to a repo.
1994
2010
1995 this function makes sure the repo is locked during the application and have
2011 this function makes sure the repo is locked during the application and have
1996 mechanism to check that no push race occurred between the creation of the
2012 mechanism to check that no push race occurred between the creation of the
1997 bundle and its application.
2013 bundle and its application.
1998
2014
1999 If the push was raced as PushRaced exception is raised."""
2015 If the push was raced as PushRaced exception is raised."""
2000 r = 0
2016 r = 0
2001 # need a transaction when processing a bundle2 stream
2017 # need a transaction when processing a bundle2 stream
2002 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2018 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2003 lockandtr = [None, None, None]
2019 lockandtr = [None, None, None]
2004 recordout = None
2020 recordout = None
2005 # quick fix for output mismatch with bundle2 in 3.4
2021 # quick fix for output mismatch with bundle2 in 3.4
2006 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
2022 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
2007 if url.startswith('remote:http:') or url.startswith('remote:https:'):
2023 if url.startswith('remote:http:') or url.startswith('remote:https:'):
2008 captureoutput = True
2024 captureoutput = True
2009 try:
2025 try:
2010 # note: outside bundle1, 'heads' is expected to be empty and this
2026 # note: outside bundle1, 'heads' is expected to be empty and this
2011 # 'check_heads' call wil be a no-op
2027 # 'check_heads' call wil be a no-op
2012 check_heads(repo, heads, 'uploading changes')
2028 check_heads(repo, heads, 'uploading changes')
2013 # push can proceed
2029 # push can proceed
2014 if not isinstance(cg, bundle2.unbundle20):
2030 if not isinstance(cg, bundle2.unbundle20):
2015 # legacy case: bundle1 (changegroup 01)
2031 # legacy case: bundle1 (changegroup 01)
2016 txnname = "\n".join([source, util.hidepassword(url)])
2032 txnname = "\n".join([source, util.hidepassword(url)])
2017 with repo.lock(), repo.transaction(txnname) as tr:
2033 with repo.lock(), repo.transaction(txnname) as tr:
2018 op = bundle2.applybundle(repo, cg, tr, source, url)
2034 op = bundle2.applybundle(repo, cg, tr, source, url)
2019 r = bundle2.combinechangegroupresults(op)
2035 r = bundle2.combinechangegroupresults(op)
2020 else:
2036 else:
2021 r = None
2037 r = None
2022 try:
2038 try:
2023 def gettransaction():
2039 def gettransaction():
2024 if not lockandtr[2]:
2040 if not lockandtr[2]:
2025 lockandtr[0] = repo.wlock()
2041 lockandtr[0] = repo.wlock()
2026 lockandtr[1] = repo.lock()
2042 lockandtr[1] = repo.lock()
2027 lockandtr[2] = repo.transaction(source)
2043 lockandtr[2] = repo.transaction(source)
2028 lockandtr[2].hookargs['source'] = source
2044 lockandtr[2].hookargs['source'] = source
2029 lockandtr[2].hookargs['url'] = url
2045 lockandtr[2].hookargs['url'] = url
2030 lockandtr[2].hookargs['bundle2'] = '1'
2046 lockandtr[2].hookargs['bundle2'] = '1'
2031 return lockandtr[2]
2047 return lockandtr[2]
2032
2048
2033 # Do greedy locking by default until we're satisfied with lazy
2049 # Do greedy locking by default until we're satisfied with lazy
2034 # locking.
2050 # locking.
2035 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2051 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2036 gettransaction()
2052 gettransaction()
2037
2053
2038 op = bundle2.bundleoperation(repo, gettransaction,
2054 op = bundle2.bundleoperation(repo, gettransaction,
2039 captureoutput=captureoutput)
2055 captureoutput=captureoutput)
2040 try:
2056 try:
2041 op = bundle2.processbundle(repo, cg, op=op)
2057 op = bundle2.processbundle(repo, cg, op=op)
2042 finally:
2058 finally:
2043 r = op.reply
2059 r = op.reply
2044 if captureoutput and r is not None:
2060 if captureoutput and r is not None:
2045 repo.ui.pushbuffer(error=True, subproc=True)
2061 repo.ui.pushbuffer(error=True, subproc=True)
2046 def recordout(output):
2062 def recordout(output):
2047 r.newpart('output', data=output, mandatory=False)
2063 r.newpart('output', data=output, mandatory=False)
2048 if lockandtr[2] is not None:
2064 if lockandtr[2] is not None:
2049 lockandtr[2].close()
2065 lockandtr[2].close()
2050 except BaseException as exc:
2066 except BaseException as exc:
2051 exc.duringunbundle2 = True
2067 exc.duringunbundle2 = True
2052 if captureoutput and r is not None:
2068 if captureoutput and r is not None:
2053 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2069 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2054 def recordout(output):
2070 def recordout(output):
2055 part = bundle2.bundlepart('output', data=output,
2071 part = bundle2.bundlepart('output', data=output,
2056 mandatory=False)
2072 mandatory=False)
2057 parts.append(part)
2073 parts.append(part)
2058 raise
2074 raise
2059 finally:
2075 finally:
2060 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2076 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2061 if recordout is not None:
2077 if recordout is not None:
2062 recordout(repo.ui.popbuffer())
2078 recordout(repo.ui.popbuffer())
2063 return r
2079 return r
2064
2080
2065 def _maybeapplyclonebundle(pullop):
2081 def _maybeapplyclonebundle(pullop):
2066 """Apply a clone bundle from a remote, if possible."""
2082 """Apply a clone bundle from a remote, if possible."""
2067
2083
2068 repo = pullop.repo
2084 repo = pullop.repo
2069 remote = pullop.remote
2085 remote = pullop.remote
2070
2086
2071 if not repo.ui.configbool('ui', 'clonebundles'):
2087 if not repo.ui.configbool('ui', 'clonebundles'):
2072 return
2088 return
2073
2089
2074 # Only run if local repo is empty.
2090 # Only run if local repo is empty.
2075 if len(repo):
2091 if len(repo):
2076 return
2092 return
2077
2093
2078 if pullop.heads:
2094 if pullop.heads:
2079 return
2095 return
2080
2096
2081 if not remote.capable('clonebundles'):
2097 if not remote.capable('clonebundles'):
2082 return
2098 return
2083
2099
2084 res = remote._call('clonebundles')
2100 res = remote._call('clonebundles')
2085
2101
2086 # If we call the wire protocol command, that's good enough to record the
2102 # If we call the wire protocol command, that's good enough to record the
2087 # attempt.
2103 # attempt.
2088 pullop.clonebundleattempted = True
2104 pullop.clonebundleattempted = True
2089
2105
2090 entries = parseclonebundlesmanifest(repo, res)
2106 entries = parseclonebundlesmanifest(repo, res)
2091 if not entries:
2107 if not entries:
2092 repo.ui.note(_('no clone bundles available on remote; '
2108 repo.ui.note(_('no clone bundles available on remote; '
2093 'falling back to regular clone\n'))
2109 'falling back to regular clone\n'))
2094 return
2110 return
2095
2111
2096 entries = filterclonebundleentries(
2112 entries = filterclonebundleentries(
2097 repo, entries, streamclonerequested=pullop.streamclonerequested)
2113 repo, entries, streamclonerequested=pullop.streamclonerequested)
2098
2114
2099 if not entries:
2115 if not entries:
2100 # There is a thundering herd concern here. However, if a server
2116 # There is a thundering herd concern here. However, if a server
2101 # operator doesn't advertise bundles appropriate for its clients,
2117 # operator doesn't advertise bundles appropriate for its clients,
2102 # they deserve what's coming. Furthermore, from a client's
2118 # they deserve what's coming. Furthermore, from a client's
2103 # perspective, no automatic fallback would mean not being able to
2119 # perspective, no automatic fallback would mean not being able to
2104 # clone!
2120 # clone!
2105 repo.ui.warn(_('no compatible clone bundles available on server; '
2121 repo.ui.warn(_('no compatible clone bundles available on server; '
2106 'falling back to regular clone\n'))
2122 'falling back to regular clone\n'))
2107 repo.ui.warn(_('(you may want to report this to the server '
2123 repo.ui.warn(_('(you may want to report this to the server '
2108 'operator)\n'))
2124 'operator)\n'))
2109 return
2125 return
2110
2126
2111 entries = sortclonebundleentries(repo.ui, entries)
2127 entries = sortclonebundleentries(repo.ui, entries)
2112
2128
2113 url = entries[0]['URL']
2129 url = entries[0]['URL']
2114 repo.ui.status(_('applying clone bundle from %s\n') % url)
2130 repo.ui.status(_('applying clone bundle from %s\n') % url)
2115 if trypullbundlefromurl(repo.ui, repo, url):
2131 if trypullbundlefromurl(repo.ui, repo, url):
2116 repo.ui.status(_('finished applying clone bundle\n'))
2132 repo.ui.status(_('finished applying clone bundle\n'))
2117 # Bundle failed.
2133 # Bundle failed.
2118 #
2134 #
2119 # We abort by default to avoid the thundering herd of
2135 # We abort by default to avoid the thundering herd of
2120 # clients flooding a server that was expecting expensive
2136 # clients flooding a server that was expecting expensive
2121 # clone load to be offloaded.
2137 # clone load to be offloaded.
2122 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2138 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2123 repo.ui.warn(_('falling back to normal clone\n'))
2139 repo.ui.warn(_('falling back to normal clone\n'))
2124 else:
2140 else:
2125 raise error.Abort(_('error applying bundle'),
2141 raise error.Abort(_('error applying bundle'),
2126 hint=_('if this error persists, consider contacting '
2142 hint=_('if this error persists, consider contacting '
2127 'the server operator or disable clone '
2143 'the server operator or disable clone '
2128 'bundles via '
2144 'bundles via '
2129 '"--config ui.clonebundles=false"'))
2145 '"--config ui.clonebundles=false"'))
2130
2146
2131 def parseclonebundlesmanifest(repo, s):
2147 def parseclonebundlesmanifest(repo, s):
2132 """Parses the raw text of a clone bundles manifest.
2148 """Parses the raw text of a clone bundles manifest.
2133
2149
2134 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2150 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2135 to the URL and other keys are the attributes for the entry.
2151 to the URL and other keys are the attributes for the entry.
2136 """
2152 """
2137 m = []
2153 m = []
2138 for line in s.splitlines():
2154 for line in s.splitlines():
2139 fields = line.split()
2155 fields = line.split()
2140 if not fields:
2156 if not fields:
2141 continue
2157 continue
2142 attrs = {'URL': fields[0]}
2158 attrs = {'URL': fields[0]}
2143 for rawattr in fields[1:]:
2159 for rawattr in fields[1:]:
2144 key, value = rawattr.split('=', 1)
2160 key, value = rawattr.split('=', 1)
2145 key = urlreq.unquote(key)
2161 key = urlreq.unquote(key)
2146 value = urlreq.unquote(value)
2162 value = urlreq.unquote(value)
2147 attrs[key] = value
2163 attrs[key] = value
2148
2164
2149 # Parse BUNDLESPEC into components. This makes client-side
2165 # Parse BUNDLESPEC into components. This makes client-side
2150 # preferences easier to specify since you can prefer a single
2166 # preferences easier to specify since you can prefer a single
2151 # component of the BUNDLESPEC.
2167 # component of the BUNDLESPEC.
2152 if key == 'BUNDLESPEC':
2168 if key == 'BUNDLESPEC':
2153 try:
2169 try:
2154 bundlespec = parsebundlespec(repo, value,
2170 bundlespec = parsebundlespec(repo, value,
2155 externalnames=True)
2171 externalnames=True)
2156 attrs['COMPRESSION'] = bundlespec.compression
2172 attrs['COMPRESSION'] = bundlespec.compression
2157 attrs['VERSION'] = bundlespec.version
2173 attrs['VERSION'] = bundlespec.version
2158 except error.InvalidBundleSpecification:
2174 except error.InvalidBundleSpecification:
2159 pass
2175 pass
2160 except error.UnsupportedBundleSpecification:
2176 except error.UnsupportedBundleSpecification:
2161 pass
2177 pass
2162
2178
2163 m.append(attrs)
2179 m.append(attrs)
2164
2180
2165 return m
2181 return m
2166
2182
2167 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2183 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2168 """Remove incompatible clone bundle manifest entries.
2184 """Remove incompatible clone bundle manifest entries.
2169
2185
2170 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2186 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2171 and returns a new list consisting of only the entries that this client
2187 and returns a new list consisting of only the entries that this client
2172 should be able to apply.
2188 should be able to apply.
2173
2189
2174 There is no guarantee we'll be able to apply all returned entries because
2190 There is no guarantee we'll be able to apply all returned entries because
2175 the metadata we use to filter on may be missing or wrong.
2191 the metadata we use to filter on may be missing or wrong.
2176 """
2192 """
2177 newentries = []
2193 newentries = []
2178 for entry in entries:
2194 for entry in entries:
2179 spec = entry.get('BUNDLESPEC')
2195 spec = entry.get('BUNDLESPEC')
2180 if spec:
2196 if spec:
2181 try:
2197 try:
2182 bundlespec = parsebundlespec(repo, spec, strict=True)
2198 bundlespec = parsebundlespec(repo, spec, strict=True)
2183
2199
2184 # If a stream clone was requested, filter out non-streamclone
2200 # If a stream clone was requested, filter out non-streamclone
2185 # entries.
2201 # entries.
2186 comp = bundlespec.compression
2202 comp = bundlespec.compression
2187 version = bundlespec.version
2203 version = bundlespec.version
2188 if streamclonerequested and (comp != 'UN' or version != 's1'):
2204 if streamclonerequested and (comp != 'UN' or version != 's1'):
2189 repo.ui.debug('filtering %s because not a stream clone\n' %
2205 repo.ui.debug('filtering %s because not a stream clone\n' %
2190 entry['URL'])
2206 entry['URL'])
2191 continue
2207 continue
2192
2208
2193 except error.InvalidBundleSpecification as e:
2209 except error.InvalidBundleSpecification as e:
2194 repo.ui.debug(str(e) + '\n')
2210 repo.ui.debug(str(e) + '\n')
2195 continue
2211 continue
2196 except error.UnsupportedBundleSpecification as e:
2212 except error.UnsupportedBundleSpecification as e:
2197 repo.ui.debug('filtering %s because unsupported bundle '
2213 repo.ui.debug('filtering %s because unsupported bundle '
2198 'spec: %s\n' % (
2214 'spec: %s\n' % (
2199 entry['URL'], stringutil.forcebytestr(e)))
2215 entry['URL'], stringutil.forcebytestr(e)))
2200 continue
2216 continue
2201 # If we don't have a spec and requested a stream clone, we don't know
2217 # If we don't have a spec and requested a stream clone, we don't know
2202 # what the entry is so don't attempt to apply it.
2218 # what the entry is so don't attempt to apply it.
2203 elif streamclonerequested:
2219 elif streamclonerequested:
2204 repo.ui.debug('filtering %s because cannot determine if a stream '
2220 repo.ui.debug('filtering %s because cannot determine if a stream '
2205 'clone bundle\n' % entry['URL'])
2221 'clone bundle\n' % entry['URL'])
2206 continue
2222 continue
2207
2223
2208 if 'REQUIRESNI' in entry and not sslutil.hassni:
2224 if 'REQUIRESNI' in entry and not sslutil.hassni:
2209 repo.ui.debug('filtering %s because SNI not supported\n' %
2225 repo.ui.debug('filtering %s because SNI not supported\n' %
2210 entry['URL'])
2226 entry['URL'])
2211 continue
2227 continue
2212
2228
2213 newentries.append(entry)
2229 newentries.append(entry)
2214
2230
2215 return newentries
2231 return newentries
2216
2232
2217 class clonebundleentry(object):
2233 class clonebundleentry(object):
2218 """Represents an item in a clone bundles manifest.
2234 """Represents an item in a clone bundles manifest.
2219
2235
2220 This rich class is needed to support sorting since sorted() in Python 3
2236 This rich class is needed to support sorting since sorted() in Python 3
2221 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
2237 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
2222 won't work.
2238 won't work.
2223 """
2239 """
2224
2240
2225 def __init__(self, value, prefers):
2241 def __init__(self, value, prefers):
2226 self.value = value
2242 self.value = value
2227 self.prefers = prefers
2243 self.prefers = prefers
2228
2244
2229 def _cmp(self, other):
2245 def _cmp(self, other):
2230 for prefkey, prefvalue in self.prefers:
2246 for prefkey, prefvalue in self.prefers:
2231 avalue = self.value.get(prefkey)
2247 avalue = self.value.get(prefkey)
2232 bvalue = other.value.get(prefkey)
2248 bvalue = other.value.get(prefkey)
2233
2249
2234 # Special case for b missing attribute and a matches exactly.
2250 # Special case for b missing attribute and a matches exactly.
2235 if avalue is not None and bvalue is None and avalue == prefvalue:
2251 if avalue is not None and bvalue is None and avalue == prefvalue:
2236 return -1
2252 return -1
2237
2253
2238 # Special case for a missing attribute and b matches exactly.
2254 # Special case for a missing attribute and b matches exactly.
2239 if bvalue is not None and avalue is None and bvalue == prefvalue:
2255 if bvalue is not None and avalue is None and bvalue == prefvalue:
2240 return 1
2256 return 1
2241
2257
2242 # We can't compare unless attribute present on both.
2258 # We can't compare unless attribute present on both.
2243 if avalue is None or bvalue is None:
2259 if avalue is None or bvalue is None:
2244 continue
2260 continue
2245
2261
2246 # Same values should fall back to next attribute.
2262 # Same values should fall back to next attribute.
2247 if avalue == bvalue:
2263 if avalue == bvalue:
2248 continue
2264 continue
2249
2265
2250 # Exact matches come first.
2266 # Exact matches come first.
2251 if avalue == prefvalue:
2267 if avalue == prefvalue:
2252 return -1
2268 return -1
2253 if bvalue == prefvalue:
2269 if bvalue == prefvalue:
2254 return 1
2270 return 1
2255
2271
2256 # Fall back to next attribute.
2272 # Fall back to next attribute.
2257 continue
2273 continue
2258
2274
2259 # If we got here we couldn't sort by attributes and prefers. Fall
2275 # If we got here we couldn't sort by attributes and prefers. Fall
2260 # back to index order.
2276 # back to index order.
2261 return 0
2277 return 0
2262
2278
2263 def __lt__(self, other):
2279 def __lt__(self, other):
2264 return self._cmp(other) < 0
2280 return self._cmp(other) < 0
2265
2281
2266 def __gt__(self, other):
2282 def __gt__(self, other):
2267 return self._cmp(other) > 0
2283 return self._cmp(other) > 0
2268
2284
2269 def __eq__(self, other):
2285 def __eq__(self, other):
2270 return self._cmp(other) == 0
2286 return self._cmp(other) == 0
2271
2287
2272 def __le__(self, other):
2288 def __le__(self, other):
2273 return self._cmp(other) <= 0
2289 return self._cmp(other) <= 0
2274
2290
2275 def __ge__(self, other):
2291 def __ge__(self, other):
2276 return self._cmp(other) >= 0
2292 return self._cmp(other) >= 0
2277
2293
2278 def __ne__(self, other):
2294 def __ne__(self, other):
2279 return self._cmp(other) != 0
2295 return self._cmp(other) != 0
2280
2296
2281 def sortclonebundleentries(ui, entries):
2297 def sortclonebundleentries(ui, entries):
2282 prefers = ui.configlist('ui', 'clonebundleprefers')
2298 prefers = ui.configlist('ui', 'clonebundleprefers')
2283 if not prefers:
2299 if not prefers:
2284 return list(entries)
2300 return list(entries)
2285
2301
2286 prefers = [p.split('=', 1) for p in prefers]
2302 prefers = [p.split('=', 1) for p in prefers]
2287
2303
2288 items = sorted(clonebundleentry(v, prefers) for v in entries)
2304 items = sorted(clonebundleentry(v, prefers) for v in entries)
2289 return [i.value for i in items]
2305 return [i.value for i in items]
2290
2306
2291 def trypullbundlefromurl(ui, repo, url):
2307 def trypullbundlefromurl(ui, repo, url):
2292 """Attempt to apply a bundle from a URL."""
2308 """Attempt to apply a bundle from a URL."""
2293 with repo.lock(), repo.transaction('bundleurl') as tr:
2309 with repo.lock(), repo.transaction('bundleurl') as tr:
2294 try:
2310 try:
2295 fh = urlmod.open(ui, url)
2311 fh = urlmod.open(ui, url)
2296 cg = readbundle(ui, fh, 'stream')
2312 cg = readbundle(ui, fh, 'stream')
2297
2313
2298 if isinstance(cg, streamclone.streamcloneapplier):
2314 if isinstance(cg, streamclone.streamcloneapplier):
2299 cg.apply(repo)
2315 cg.apply(repo)
2300 else:
2316 else:
2301 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2317 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2302 return True
2318 return True
2303 except urlerr.httperror as e:
2319 except urlerr.httperror as e:
2304 ui.warn(_('HTTP error fetching bundle: %s\n') %
2320 ui.warn(_('HTTP error fetching bundle: %s\n') %
2305 stringutil.forcebytestr(e))
2321 stringutil.forcebytestr(e))
2306 except urlerr.urlerror as e:
2322 except urlerr.urlerror as e:
2307 ui.warn(_('error fetching bundle: %s\n') %
2323 ui.warn(_('error fetching bundle: %s\n') %
2308 stringutil.forcebytestr(e.reason))
2324 stringutil.forcebytestr(e.reason))
2309
2325
2310 return False
2326 return False
@@ -1,55 +1,48 b''
1 Test creating a consuming stream bundle v2
1 Test creating a consuming stream bundle v2
2
2
3 $ getmainid() {
3 $ getmainid() {
4 > hg -R main log --template '{node}\n' --rev "$1"
4 > hg -R main log --template '{node}\n' --rev "$1"
5 > }
5 > }
6
6
7 $ cp $HGRCPATH $TESTTMP/hgrc.orig
7 $ cp $HGRCPATH $TESTTMP/hgrc.orig
8
8
9 $ cat >> $HGRCPATH << EOF
9 $ cat >> $HGRCPATH << EOF
10 > [experimental]
10 > [experimental]
11 > evolution.createmarkers=True
11 > evolution.createmarkers=True
12 > evolution.exchange=True
12 > evolution.exchange=True
13 > bundle2-output-capture=True
13 > bundle2-output-capture=True
14 > [ui]
14 > [ui]
15 > ssh="$PYTHON" "$TESTDIR/dummyssh"
15 > ssh="$PYTHON" "$TESTDIR/dummyssh"
16 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
16 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
17 > [web]
17 > [web]
18 > push_ssl = false
18 > push_ssl = false
19 > allow_push = *
19 > allow_push = *
20 > [phases]
20 > [phases]
21 > publish=False
21 > publish=False
22 > [extensions]
22 > [extensions]
23 > drawdag=$TESTDIR/drawdag.py
23 > drawdag=$TESTDIR/drawdag.py
24 > EOF
24 > EOF
25
25
26 The extension requires a repo (currently unused)
26 The extension requires a repo (currently unused)
27
27
28 $ hg init main
28 $ hg init main
29 $ cd main
29 $ cd main
30
30
31 $ hg debugdrawdag <<'EOF'
31 $ hg debugdrawdag <<'EOF'
32 > E
32 > E
33 > |
33 > |
34 > D
34 > D
35 > |
35 > |
36 > C
36 > C
37 > |
37 > |
38 > B
38 > B
39 > |
39 > |
40 > A
40 > A
41 > EOF
41 > EOF
42
42
43 $ hg bundle -a --type="none-v2;stream=v2" bundle.hg
43 $ hg bundle -a --type="none-v2;stream=v2" bundle.hg
44 5 changesets found
45 $ hg debugbundle bundle.hg
44 $ hg debugbundle bundle.hg
46 Stream params: {}
45 Stream params: {}
47 changegroup -- {nbchanges: 5, version: 02}
46 stream2 -- {bytecount: 1693, filecount: 11, requirements: dotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Cstore}
48 426bada5c67598ca65036d57d9e4b64b0c1ce7a0
49 112478962961147124edd43549aedd1a335e44bf
50 26805aba1e600a82e93661149f2313866a221a7b
51 f585351a92f85104bff7c284233c338b10eb1df7
52 9bc730a19041f9ec7cb33c626e811aa233efb18c
53 cache:rev-branch-cache -- {}
54 $ hg debugbundle --spec bundle.hg
47 $ hg debugbundle --spec bundle.hg
55 none-v2
48 none-v2;stream=v2;requirements%3Ddotencode%2Cfncache%2Cgeneraldelta%2Crevlogv1%2Cstore
General Comments 0
You need to be logged in to leave comments. Login now