##// END OF EJS Templates
exchange: fix test for remote support of binary phases...
Boris Feld -
r34361:cd3f3971 default
parent child Browse files
Show More
@@ -1,2076 +1,2077 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 hex,
16 hex,
17 nullid,
17 nullid,
18 )
18 )
19 from . import (
19 from . import (
20 bookmarks as bookmod,
20 bookmarks as bookmod,
21 bundle2,
21 bundle2,
22 changegroup,
22 changegroup,
23 discovery,
23 discovery,
24 error,
24 error,
25 lock as lockmod,
25 lock as lockmod,
26 obsolete,
26 obsolete,
27 phases,
27 phases,
28 pushkey,
28 pushkey,
29 pycompat,
29 pycompat,
30 scmutil,
30 scmutil,
31 sslutil,
31 sslutil,
32 streamclone,
32 streamclone,
33 url as urlmod,
33 url as urlmod,
34 util,
34 util,
35 )
35 )
36
36
37 urlerr = util.urlerr
37 urlerr = util.urlerr
38 urlreq = util.urlreq
38 urlreq = util.urlreq
39
39
40 # Maps bundle version human names to changegroup versions.
40 # Maps bundle version human names to changegroup versions.
41 _bundlespeccgversions = {'v1': '01',
41 _bundlespeccgversions = {'v1': '01',
42 'v2': '02',
42 'v2': '02',
43 'packed1': 's1',
43 'packed1': 's1',
44 'bundle2': '02', #legacy
44 'bundle2': '02', #legacy
45 }
45 }
46
46
47 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
47 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
48 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
48 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
49
49
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>[;key=value[;key=value...]]" into the bare version
        # string and a dict of URI-decoded parameters.
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params


    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
                _('invalid bundle specification; '
                  'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-qualified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                    _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    # Translate the human-centric names into the internal compression type
    # and changegroup version tokens, unless the caller wants them verbatim.
    if not externalnames:
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
171
171
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the bundle header from ``fh`` and return a matching unbundler.

    ``fname`` is used only for error reporting ("stream" when empty); when
    ``vfs`` is provided a non-empty ``fname`` is joined onto the vfs root.
    Raises ``error.Abort`` for non-Mercurial data or an unknown version.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if fname:
        if vfs:
            fname = vfs.join(fname)
    else:
        fname = "stream"
        headerless = not header.startswith('HG') and header.startswith('\0')
        if headerless:
            # Raw, uncompressed cg1 data: push the consumed bytes back and
            # proceed as if an HG10UN header had been read.
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'

    magic = header[0:2]
    version = header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
199
199
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal bundle compression type (e.g. 'BZ') to its
        # bundlespec name (e.g. 'bzip2'); None if the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # Internal marker for a bzip2 stream whose 'BZ' prefix was
            # already consumed; report it as plain bzip2.
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        # Find the changegroup part (if any) to determine the version;
        # iterating all parts keeps the last changegroup seen.
        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
252
252
def _computeoutgoing(repo, heads, common):
    """Compute which revs are outgoing given common and head node sets.

    ``common`` nodes unknown to the local changelog are discarded; an empty
    ``common`` falls back to the null node, and an empty ``heads`` defaults
    to all local heads.  This lives in its own function so extensions can
    access (or wrap) the logic.

    Returns a discovery.outgoing object.
    """
    changelog = repo.changelog
    if common:
        known = changelog.hasnode
        commonnodes = [node for node in common if known(node)]
    else:
        commonnodes = [nullid]
    headnodes = heads if heads else changelog.heads()
    return discovery.outgoing(repo, commonnodes, headnodes)
271
271
def _forcebundle1(op):
    """return true if a pull/push must use bundle1

    This function is used to allow testing of the older bundle version"""
    ui = op.repo.ui
    # The goal of this config is to allow developers to choose the bundle
    # version used during exchange. This is especially handy during tests.
    # Value is a list of bundle versions to pick from; the highest version
    # listed should be used.
    #
    # developer config: devel.legacy.exchange
    exchange = ui.configlist('devel', 'legacy.exchange')
    # Fixed: dropped the dead `forcebundle1 = False` assignment that was
    # unconditionally overwritten below.
    forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
    # Even without the developer override, fall back to bundle1 when the
    # remote does not advertise bundle2 support.
    return forcebundle1 or not op.remote.capable('bundle2')
287
287
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        # cgresult truthy means the changegroup made it to the remote, so
        # the pushed heads are now common; otherwise fall back.
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message used when pushing bookmark
    # (each value is a (success message, failure message) pair)
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
412
412
413
413
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    # For local (filesystem) peers, make sure the destination repo actually
    # supports every requirement of the source before pushing anything.
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    # Any of wlock/lock/trmanager may be None (lock failure above);
    # nullcontextmanager() keeps the with-statement shape uniform.
    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        # The remaining steps are no-ops if bundle2 already performed them
        # (they check pushop.stepsdone).
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
478
478
479 # list of steps to perform discovery before push
479 # list of steps to perform discovery before push
480 pushdiscoveryorder = []
480 pushdiscoveryorder = []
481
481
482 # Mapping between step name and function
482 # Mapping between step name and function
483 #
483 #
484 # This exists to help extensions wrap steps if necessary
484 # This exists to help extensions wrap steps if necessary
485 pushdiscoverymapping = {}
485 pushdiscoverymapping = {}
486
486
487 def pushdiscovery(stepname):
487 def pushdiscovery(stepname):
488 """decorator for function performing discovery before push
488 """decorator for function performing discovery before push
489
489
490 The function is added to the step -> function mapping and appended to the
490 The function is added to the step -> function mapping and appended to the
491 list of steps. Beware that decorated function will be added in order (this
491 list of steps. Beware that decorated function will be added in order (this
492 may matter).
492 may matter).
493
493
494 You can only use this decorator for a new step, if you want to wrap a step
494 You can only use this decorator for a new step, if you want to wrap a step
495 from an extension, change the pushdiscovery dictionary directly."""
495 from an extension, change the pushdiscovery dictionary directly."""
496 def dec(func):
496 def dec(func):
497 assert stepname not in pushdiscoverymapping
497 assert stepname not in pushdiscoverymapping
498 pushdiscoverymapping[stepname] = func
498 pushdiscoverymapping[stepname] = func
499 pushdiscoveryorder.append(stepname)
499 pushdiscoveryorder.append(stepname)
500 return func
500 return func
501 return dec
501 return dec
502
502
503 def _pushdiscovery(pushop):
503 def _pushdiscovery(pushop):
504 """Run all discovery steps"""
504 """Run all discovery steps"""
505 for stepname in pushdiscoveryorder:
505 for stepname in pushdiscoveryorder:
506 step = pushdiscoverymapping[stepname]
506 step = pushdiscoverymapping[stepname]
507 step(pushop)
507 step(pushop)
508
508
509 @pushdiscovery('changeset')
509 @pushdiscovery('changeset')
510 def _pushdiscoverychangeset(pushop):
510 def _pushdiscoverychangeset(pushop):
511 """discover the changeset that need to be pushed"""
511 """discover the changeset that need to be pushed"""
512 fci = discovery.findcommonincoming
512 fci = discovery.findcommonincoming
513 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
513 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
514 common, inc, remoteheads = commoninc
514 common, inc, remoteheads = commoninc
515 fco = discovery.findcommonoutgoing
515 fco = discovery.findcommonoutgoing
516 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
516 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
517 commoninc=commoninc, force=pushop.force)
517 commoninc=commoninc, force=pushop.force)
518 pushop.outgoing = outgoing
518 pushop.outgoing = outgoing
519 pushop.remoteheads = remoteheads
519 pushop.remoteheads = remoteheads
520 pushop.incoming = inc
520 pushop.incoming = inc
521
521
522 @pushdiscovery('phase')
522 @pushdiscovery('phase')
523 def _pushdiscoveryphase(pushop):
523 def _pushdiscoveryphase(pushop):
524 """discover the phase that needs to be pushed
524 """discover the phase that needs to be pushed
525
525
526 (computed for both success and failure case for changesets push)"""
526 (computed for both success and failure case for changesets push)"""
527 outgoing = pushop.outgoing
527 outgoing = pushop.outgoing
528 unfi = pushop.repo.unfiltered()
528 unfi = pushop.repo.unfiltered()
529 remotephases = pushop.remote.listkeys('phases')
529 remotephases = pushop.remote.listkeys('phases')
530 publishing = remotephases.get('publishing', False)
530 publishing = remotephases.get('publishing', False)
531 if (pushop.ui.configbool('ui', '_usedassubrepo')
531 if (pushop.ui.configbool('ui', '_usedassubrepo')
532 and remotephases # server supports phases
532 and remotephases # server supports phases
533 and not pushop.outgoing.missing # no changesets to be pushed
533 and not pushop.outgoing.missing # no changesets to be pushed
534 and publishing):
534 and publishing):
535 # When:
535 # When:
536 # - this is a subrepo push
536 # - this is a subrepo push
537 # - and remote support phase
537 # - and remote support phase
538 # - and no changeset are to be pushed
538 # - and no changeset are to be pushed
539 # - and remote is publishing
539 # - and remote is publishing
540 # We may be in issue 3871 case!
540 # We may be in issue 3871 case!
541 # We drop the possible phase synchronisation done by
541 # We drop the possible phase synchronisation done by
542 # courtesy to publish changesets possibly locally draft
542 # courtesy to publish changesets possibly locally draft
543 # on the remote.
543 # on the remote.
544 remotephases = {'publishing': 'True'}
544 remotephases = {'publishing': 'True'}
545 ana = phases.analyzeremotephases(pushop.repo,
545 ana = phases.analyzeremotephases(pushop.repo,
546 pushop.fallbackheads,
546 pushop.fallbackheads,
547 remotephases)
547 remotephases)
548 pheads, droots = ana
548 pheads, droots = ana
549 extracond = ''
549 extracond = ''
550 if not publishing:
550 if not publishing:
551 extracond = ' and public()'
551 extracond = ' and public()'
552 revset = 'heads((%%ln::%%ln) %s)' % extracond
552 revset = 'heads((%%ln::%%ln) %s)' % extracond
553 # Get the list of all revs draft on remote by public here.
553 # Get the list of all revs draft on remote by public here.
554 # XXX Beware that revset break if droots is not strictly
554 # XXX Beware that revset break if droots is not strictly
555 # XXX root we may want to ensure it is but it is costly
555 # XXX root we may want to ensure it is but it is costly
556 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
556 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
557 if not outgoing.missing:
557 if not outgoing.missing:
558 future = fallback
558 future = fallback
559 else:
559 else:
560 # adds changeset we are going to push as draft
560 # adds changeset we are going to push as draft
561 #
561 #
562 # should not be necessary for publishing server, but because of an
562 # should not be necessary for publishing server, but because of an
563 # issue fixed in xxxxx we have to do it anyway.
563 # issue fixed in xxxxx we have to do it anyway.
564 fdroots = list(unfi.set('roots(%ln + %ln::)',
564 fdroots = list(unfi.set('roots(%ln + %ln::)',
565 outgoing.missing, droots))
565 outgoing.missing, droots))
566 fdroots = [f.node() for f in fdroots]
566 fdroots = [f.node() for f in fdroots]
567 future = list(unfi.set(revset, fdroots, pushop.futureheads))
567 future = list(unfi.set(revset, fdroots, pushop.futureheads))
568 pushop.outdatedphases = future
568 pushop.outdatedphases = future
569 pushop.fallbackoutdatedphases = fallback
569 pushop.fallbackoutdatedphases = fallback
570
570
571 @pushdiscovery('obsmarker')
571 @pushdiscovery('obsmarker')
572 def _pushdiscoveryobsmarkers(pushop):
572 def _pushdiscoveryobsmarkers(pushop):
573 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
573 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
574 and pushop.repo.obsstore
574 and pushop.repo.obsstore
575 and 'obsolete' in pushop.remote.listkeys('namespaces')):
575 and 'obsolete' in pushop.remote.listkeys('namespaces')):
576 repo = pushop.repo
576 repo = pushop.repo
577 # very naive computation, that can be quite expensive on big repo.
577 # very naive computation, that can be quite expensive on big repo.
578 # However: evolution is currently slow on them anyway.
578 # However: evolution is currently slow on them anyway.
579 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
579 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
580 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
580 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
581
581
582 @pushdiscovery('bookmarks')
582 @pushdiscovery('bookmarks')
583 def _pushdiscoverybookmarks(pushop):
583 def _pushdiscoverybookmarks(pushop):
584 ui = pushop.ui
584 ui = pushop.ui
585 repo = pushop.repo.unfiltered()
585 repo = pushop.repo.unfiltered()
586 remote = pushop.remote
586 remote = pushop.remote
587 ui.debug("checking for updated bookmarks\n")
587 ui.debug("checking for updated bookmarks\n")
588 ancestors = ()
588 ancestors = ()
589 if pushop.revs:
589 if pushop.revs:
590 revnums = map(repo.changelog.rev, pushop.revs)
590 revnums = map(repo.changelog.rev, pushop.revs)
591 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
591 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
592 remotebookmark = remote.listkeys('bookmarks')
592 remotebookmark = remote.listkeys('bookmarks')
593
593
594 explicit = set([repo._bookmarks.expandname(bookmark)
594 explicit = set([repo._bookmarks.expandname(bookmark)
595 for bookmark in pushop.bookmarks])
595 for bookmark in pushop.bookmarks])
596
596
597 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
597 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
598 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
598 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
599
599
600 def safehex(x):
600 def safehex(x):
601 if x is None:
601 if x is None:
602 return x
602 return x
603 return hex(x)
603 return hex(x)
604
604
605 def hexifycompbookmarks(bookmarks):
605 def hexifycompbookmarks(bookmarks):
606 for b, scid, dcid in bookmarks:
606 for b, scid, dcid in bookmarks:
607 yield b, safehex(scid), safehex(dcid)
607 yield b, safehex(scid), safehex(dcid)
608
608
609 comp = [hexifycompbookmarks(marks) for marks in comp]
609 comp = [hexifycompbookmarks(marks) for marks in comp]
610 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
610 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
611
611
612 for b, scid, dcid in advsrc:
612 for b, scid, dcid in advsrc:
613 if b in explicit:
613 if b in explicit:
614 explicit.remove(b)
614 explicit.remove(b)
615 if not ancestors or repo[scid].rev() in ancestors:
615 if not ancestors or repo[scid].rev() in ancestors:
616 pushop.outbookmarks.append((b, dcid, scid))
616 pushop.outbookmarks.append((b, dcid, scid))
617 # search added bookmark
617 # search added bookmark
618 for b, scid, dcid in addsrc:
618 for b, scid, dcid in addsrc:
619 if b in explicit:
619 if b in explicit:
620 explicit.remove(b)
620 explicit.remove(b)
621 pushop.outbookmarks.append((b, '', scid))
621 pushop.outbookmarks.append((b, '', scid))
622 # search for overwritten bookmark
622 # search for overwritten bookmark
623 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
623 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
624 if b in explicit:
624 if b in explicit:
625 explicit.remove(b)
625 explicit.remove(b)
626 pushop.outbookmarks.append((b, dcid, scid))
626 pushop.outbookmarks.append((b, dcid, scid))
627 # search for bookmark to delete
627 # search for bookmark to delete
628 for b, scid, dcid in adddst:
628 for b, scid, dcid in adddst:
629 if b in explicit:
629 if b in explicit:
630 explicit.remove(b)
630 explicit.remove(b)
631 # treat as "deleted locally"
631 # treat as "deleted locally"
632 pushop.outbookmarks.append((b, dcid, ''))
632 pushop.outbookmarks.append((b, dcid, ''))
633 # identical bookmarks shouldn't get reported
633 # identical bookmarks shouldn't get reported
634 for b, scid, dcid in same:
634 for b, scid, dcid in same:
635 if b in explicit:
635 if b in explicit:
636 explicit.remove(b)
636 explicit.remove(b)
637
637
638 if explicit:
638 if explicit:
639 explicit = sorted(explicit)
639 explicit = sorted(explicit)
640 # we should probably list all of them
640 # we should probably list all of them
641 ui.warn(_('bookmark %s does not exist on the local '
641 ui.warn(_('bookmark %s does not exist on the local '
642 'or remote repository!\n') % explicit[0])
642 'or remote repository!\n') % explicit[0])
643 pushop.bkresult = 2
643 pushop.bkresult = 2
644
644
645 pushop.outbookmarks.sort()
645 pushop.outbookmarks.sort()
646
646
647 def _pushcheckoutgoing(pushop):
647 def _pushcheckoutgoing(pushop):
648 outgoing = pushop.outgoing
648 outgoing = pushop.outgoing
649 unfi = pushop.repo.unfiltered()
649 unfi = pushop.repo.unfiltered()
650 if not outgoing.missing:
650 if not outgoing.missing:
651 # nothing to push
651 # nothing to push
652 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
652 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
653 return False
653 return False
654 # something to push
654 # something to push
655 if not pushop.force:
655 if not pushop.force:
656 # if repo.obsstore == False --> no obsolete
656 # if repo.obsstore == False --> no obsolete
657 # then, save the iteration
657 # then, save the iteration
658 if unfi.obsstore:
658 if unfi.obsstore:
659 # this message are here for 80 char limit reason
659 # this message are here for 80 char limit reason
660 mso = _("push includes obsolete changeset: %s!")
660 mso = _("push includes obsolete changeset: %s!")
661 mspd = _("push includes phase-divergent changeset: %s!")
661 mspd = _("push includes phase-divergent changeset: %s!")
662 mscd = _("push includes content-divergent changeset: %s!")
662 mscd = _("push includes content-divergent changeset: %s!")
663 mst = {"orphan": _("push includes orphan changeset: %s!"),
663 mst = {"orphan": _("push includes orphan changeset: %s!"),
664 "phase-divergent": mspd,
664 "phase-divergent": mspd,
665 "content-divergent": mscd}
665 "content-divergent": mscd}
666 # If we are to push if there is at least one
666 # If we are to push if there is at least one
667 # obsolete or unstable changeset in missing, at
667 # obsolete or unstable changeset in missing, at
668 # least one of the missinghead will be obsolete or
668 # least one of the missinghead will be obsolete or
669 # unstable. So checking heads only is ok
669 # unstable. So checking heads only is ok
670 for node in outgoing.missingheads:
670 for node in outgoing.missingheads:
671 ctx = unfi[node]
671 ctx = unfi[node]
672 if ctx.obsolete():
672 if ctx.obsolete():
673 raise error.Abort(mso % ctx)
673 raise error.Abort(mso % ctx)
674 elif ctx.isunstable():
674 elif ctx.isunstable():
675 # TODO print more than one instability in the abort
675 # TODO print more than one instability in the abort
676 # message
676 # message
677 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
677 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
678
678
679 discovery.checkheads(pushop)
679 discovery.checkheads(pushop)
680 return True
680 return True
681
681
682 # List of names of steps to perform for an outgoing bundle2, order matters.
682 # List of names of steps to perform for an outgoing bundle2, order matters.
683 b2partsgenorder = []
683 b2partsgenorder = []
684
684
685 # Mapping between step name and function
685 # Mapping between step name and function
686 #
686 #
687 # This exists to help extensions wrap steps if necessary
687 # This exists to help extensions wrap steps if necessary
688 b2partsgenmapping = {}
688 b2partsgenmapping = {}
689
689
690 def b2partsgenerator(stepname, idx=None):
690 def b2partsgenerator(stepname, idx=None):
691 """decorator for function generating bundle2 part
691 """decorator for function generating bundle2 part
692
692
693 The function is added to the step -> function mapping and appended to the
693 The function is added to the step -> function mapping and appended to the
694 list of steps. Beware that decorated functions will be added in order
694 list of steps. Beware that decorated functions will be added in order
695 (this may matter).
695 (this may matter).
696
696
697 You can only use this decorator for new steps, if you want to wrap a step
697 You can only use this decorator for new steps, if you want to wrap a step
698 from an extension, attack the b2partsgenmapping dictionary directly."""
698 from an extension, attack the b2partsgenmapping dictionary directly."""
699 def dec(func):
699 def dec(func):
700 assert stepname not in b2partsgenmapping
700 assert stepname not in b2partsgenmapping
701 b2partsgenmapping[stepname] = func
701 b2partsgenmapping[stepname] = func
702 if idx is None:
702 if idx is None:
703 b2partsgenorder.append(stepname)
703 b2partsgenorder.append(stepname)
704 else:
704 else:
705 b2partsgenorder.insert(idx, stepname)
705 b2partsgenorder.insert(idx, stepname)
706 return func
706 return func
707 return dec
707 return dec
708
708
709 def _pushb2ctxcheckheads(pushop, bundler):
709 def _pushb2ctxcheckheads(pushop, bundler):
710 """Generate race condition checking parts
710 """Generate race condition checking parts
711
711
712 Exists as an independent function to aid extensions
712 Exists as an independent function to aid extensions
713 """
713 """
714 # * 'force' do not check for push race,
714 # * 'force' do not check for push race,
715 # * if we don't push anything, there are nothing to check.
715 # * if we don't push anything, there are nothing to check.
716 if not pushop.force and pushop.outgoing.missingheads:
716 if not pushop.force and pushop.outgoing.missingheads:
717 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
717 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
718 emptyremote = pushop.pushbranchmap is None
718 emptyremote = pushop.pushbranchmap is None
719 if not allowunrelated or emptyremote:
719 if not allowunrelated or emptyremote:
720 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
720 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
721 else:
721 else:
722 affected = set()
722 affected = set()
723 for branch, heads in pushop.pushbranchmap.iteritems():
723 for branch, heads in pushop.pushbranchmap.iteritems():
724 remoteheads, newheads, unsyncedheads, discardedheads = heads
724 remoteheads, newheads, unsyncedheads, discardedheads = heads
725 if remoteheads is not None:
725 if remoteheads is not None:
726 remote = set(remoteheads)
726 remote = set(remoteheads)
727 affected |= set(discardedheads) & remote
727 affected |= set(discardedheads) & remote
728 affected |= remote - set(newheads)
728 affected |= remote - set(newheads)
729 if affected:
729 if affected:
730 data = iter(sorted(affected))
730 data = iter(sorted(affected))
731 bundler.newpart('check:updated-heads', data=data)
731 bundler.newpart('check:updated-heads', data=data)
732
732
733 @b2partsgenerator('changeset')
733 @b2partsgenerator('changeset')
734 def _pushb2ctx(pushop, bundler):
734 def _pushb2ctx(pushop, bundler):
735 """handle changegroup push through bundle2
735 """handle changegroup push through bundle2
736
736
737 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
737 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
738 """
738 """
739 if 'changesets' in pushop.stepsdone:
739 if 'changesets' in pushop.stepsdone:
740 return
740 return
741 pushop.stepsdone.add('changesets')
741 pushop.stepsdone.add('changesets')
742 # Send known heads to the server for race detection.
742 # Send known heads to the server for race detection.
743 if not _pushcheckoutgoing(pushop):
743 if not _pushcheckoutgoing(pushop):
744 return
744 return
745 pushop.repo.prepushoutgoinghooks(pushop)
745 pushop.repo.prepushoutgoinghooks(pushop)
746
746
747 _pushb2ctxcheckheads(pushop, bundler)
747 _pushb2ctxcheckheads(pushop, bundler)
748
748
749 b2caps = bundle2.bundle2caps(pushop.remote)
749 b2caps = bundle2.bundle2caps(pushop.remote)
750 version = '01'
750 version = '01'
751 cgversions = b2caps.get('changegroup')
751 cgversions = b2caps.get('changegroup')
752 if cgversions: # 3.1 and 3.2 ship with an empty value
752 if cgversions: # 3.1 and 3.2 ship with an empty value
753 cgversions = [v for v in cgversions
753 cgversions = [v for v in cgversions
754 if v in changegroup.supportedoutgoingversions(
754 if v in changegroup.supportedoutgoingversions(
755 pushop.repo)]
755 pushop.repo)]
756 if not cgversions:
756 if not cgversions:
757 raise ValueError(_('no common changegroup version'))
757 raise ValueError(_('no common changegroup version'))
758 version = max(cgversions)
758 version = max(cgversions)
759 cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
759 cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
760 'push')
760 'push')
761 cgpart = bundler.newpart('changegroup', data=cgstream)
761 cgpart = bundler.newpart('changegroup', data=cgstream)
762 if cgversions:
762 if cgversions:
763 cgpart.addparam('version', version)
763 cgpart.addparam('version', version)
764 if 'treemanifest' in pushop.repo.requirements:
764 if 'treemanifest' in pushop.repo.requirements:
765 cgpart.addparam('treemanifest', '1')
765 cgpart.addparam('treemanifest', '1')
766 def handlereply(op):
766 def handlereply(op):
767 """extract addchangegroup returns from server reply"""
767 """extract addchangegroup returns from server reply"""
768 cgreplies = op.records.getreplies(cgpart.id)
768 cgreplies = op.records.getreplies(cgpart.id)
769 assert len(cgreplies['changegroup']) == 1
769 assert len(cgreplies['changegroup']) == 1
770 pushop.cgresult = cgreplies['changegroup'][0]['return']
770 pushop.cgresult = cgreplies['changegroup'][0]['return']
771 return handlereply
771 return handlereply
772
772
773 @b2partsgenerator('phase')
773 @b2partsgenerator('phase')
774 def _pushb2phases(pushop, bundler):
774 def _pushb2phases(pushop, bundler):
775 """handle phase push through bundle2"""
775 """handle phase push through bundle2"""
776 if 'phases' in pushop.stepsdone:
776 if 'phases' in pushop.stepsdone:
777 return
777 return
778 b2caps = bundle2.bundle2caps(pushop.remote)
778 b2caps = bundle2.bundle2caps(pushop.remote)
779 if not 'pushkey' in b2caps:
779 if not 'pushkey' in b2caps:
780 return
780 return
781 pushop.stepsdone.add('phases')
781 pushop.stepsdone.add('phases')
782 part2node = []
782 part2node = []
783
783
784 def handlefailure(pushop, exc):
784 def handlefailure(pushop, exc):
785 targetid = int(exc.partid)
785 targetid = int(exc.partid)
786 for partid, node in part2node:
786 for partid, node in part2node:
787 if partid == targetid:
787 if partid == targetid:
788 raise error.Abort(_('updating %s to public failed') % node)
788 raise error.Abort(_('updating %s to public failed') % node)
789
789
790 enc = pushkey.encode
790 enc = pushkey.encode
791 for newremotehead in pushop.outdatedphases:
791 for newremotehead in pushop.outdatedphases:
792 part = bundler.newpart('pushkey')
792 part = bundler.newpart('pushkey')
793 part.addparam('namespace', enc('phases'))
793 part.addparam('namespace', enc('phases'))
794 part.addparam('key', enc(newremotehead.hex()))
794 part.addparam('key', enc(newremotehead.hex()))
795 part.addparam('old', enc('%d' % phases.draft))
795 part.addparam('old', enc('%d' % phases.draft))
796 part.addparam('new', enc('%d' % phases.public))
796 part.addparam('new', enc('%d' % phases.public))
797 part2node.append((part.id, newremotehead))
797 part2node.append((part.id, newremotehead))
798 pushop.pkfailcb[part.id] = handlefailure
798 pushop.pkfailcb[part.id] = handlefailure
799
799
800 def handlereply(op):
800 def handlereply(op):
801 for partid, node in part2node:
801 for partid, node in part2node:
802 partrep = op.records.getreplies(partid)
802 partrep = op.records.getreplies(partid)
803 results = partrep['pushkey']
803 results = partrep['pushkey']
804 assert len(results) <= 1
804 assert len(results) <= 1
805 msg = None
805 msg = None
806 if not results:
806 if not results:
807 msg = _('server ignored update of %s to public!\n') % node
807 msg = _('server ignored update of %s to public!\n') % node
808 elif not int(results[0]['return']):
808 elif not int(results[0]['return']):
809 msg = _('updating %s to public failed!\n') % node
809 msg = _('updating %s to public failed!\n') % node
810 if msg is not None:
810 if msg is not None:
811 pushop.ui.warn(msg)
811 pushop.ui.warn(msg)
812 return handlereply
812 return handlereply
813
813
814 @b2partsgenerator('obsmarkers')
814 @b2partsgenerator('obsmarkers')
815 def _pushb2obsmarkers(pushop, bundler):
815 def _pushb2obsmarkers(pushop, bundler):
816 if 'obsmarkers' in pushop.stepsdone:
816 if 'obsmarkers' in pushop.stepsdone:
817 return
817 return
818 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
818 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
819 if obsolete.commonversion(remoteversions) is None:
819 if obsolete.commonversion(remoteversions) is None:
820 return
820 return
821 pushop.stepsdone.add('obsmarkers')
821 pushop.stepsdone.add('obsmarkers')
822 if pushop.outobsmarkers:
822 if pushop.outobsmarkers:
823 markers = sorted(pushop.outobsmarkers)
823 markers = sorted(pushop.outobsmarkers)
824 bundle2.buildobsmarkerspart(bundler, markers)
824 bundle2.buildobsmarkerspart(bundler, markers)
825
825
826 @b2partsgenerator('bookmarks')
826 @b2partsgenerator('bookmarks')
827 def _pushb2bookmarks(pushop, bundler):
827 def _pushb2bookmarks(pushop, bundler):
828 """handle bookmark push through bundle2"""
828 """handle bookmark push through bundle2"""
829 if 'bookmarks' in pushop.stepsdone:
829 if 'bookmarks' in pushop.stepsdone:
830 return
830 return
831 b2caps = bundle2.bundle2caps(pushop.remote)
831 b2caps = bundle2.bundle2caps(pushop.remote)
832 if 'pushkey' not in b2caps:
832 if 'pushkey' not in b2caps:
833 return
833 return
834 pushop.stepsdone.add('bookmarks')
834 pushop.stepsdone.add('bookmarks')
835 part2book = []
835 part2book = []
836 enc = pushkey.encode
836 enc = pushkey.encode
837
837
838 def handlefailure(pushop, exc):
838 def handlefailure(pushop, exc):
839 targetid = int(exc.partid)
839 targetid = int(exc.partid)
840 for partid, book, action in part2book:
840 for partid, book, action in part2book:
841 if partid == targetid:
841 if partid == targetid:
842 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
842 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
843 # we should not be called for part we did not generated
843 # we should not be called for part we did not generated
844 assert False
844 assert False
845
845
846 for book, old, new in pushop.outbookmarks:
846 for book, old, new in pushop.outbookmarks:
847 part = bundler.newpart('pushkey')
847 part = bundler.newpart('pushkey')
848 part.addparam('namespace', enc('bookmarks'))
848 part.addparam('namespace', enc('bookmarks'))
849 part.addparam('key', enc(book))
849 part.addparam('key', enc(book))
850 part.addparam('old', enc(old))
850 part.addparam('old', enc(old))
851 part.addparam('new', enc(new))
851 part.addparam('new', enc(new))
852 action = 'update'
852 action = 'update'
853 if not old:
853 if not old:
854 action = 'export'
854 action = 'export'
855 elif not new:
855 elif not new:
856 action = 'delete'
856 action = 'delete'
857 part2book.append((part.id, book, action))
857 part2book.append((part.id, book, action))
858 pushop.pkfailcb[part.id] = handlefailure
858 pushop.pkfailcb[part.id] = handlefailure
859
859
860 def handlereply(op):
860 def handlereply(op):
861 ui = pushop.ui
861 ui = pushop.ui
862 for partid, book, action in part2book:
862 for partid, book, action in part2book:
863 partrep = op.records.getreplies(partid)
863 partrep = op.records.getreplies(partid)
864 results = partrep['pushkey']
864 results = partrep['pushkey']
865 assert len(results) <= 1
865 assert len(results) <= 1
866 if not results:
866 if not results:
867 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
867 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
868 else:
868 else:
869 ret = int(results[0]['return'])
869 ret = int(results[0]['return'])
870 if ret:
870 if ret:
871 ui.status(bookmsgmap[action][0] % book)
871 ui.status(bookmsgmap[action][0] % book)
872 else:
872 else:
873 ui.warn(bookmsgmap[action][1] % book)
873 ui.warn(bookmsgmap[action][1] % book)
874 if pushop.bkresult is not None:
874 if pushop.bkresult is not None:
875 pushop.bkresult = 1
875 pushop.bkresult = 1
876 return handlereply
876 return handlereply
877
877
878 @b2partsgenerator('pushvars', idx=0)
878 @b2partsgenerator('pushvars', idx=0)
879 def _getbundlesendvars(pushop, bundler):
879 def _getbundlesendvars(pushop, bundler):
880 '''send shellvars via bundle2'''
880 '''send shellvars via bundle2'''
881 pushvars = pushop.pushvars
881 pushvars = pushop.pushvars
882 if pushvars:
882 if pushvars:
883 shellvars = {}
883 shellvars = {}
884 for raw in pushvars:
884 for raw in pushvars:
885 if '=' not in raw:
885 if '=' not in raw:
886 msg = ("unable to parse variable '%s', should follow "
886 msg = ("unable to parse variable '%s', should follow "
887 "'KEY=VALUE' or 'KEY=' format")
887 "'KEY=VALUE' or 'KEY=' format")
888 raise error.Abort(msg % raw)
888 raise error.Abort(msg % raw)
889 k, v = raw.split('=', 1)
889 k, v = raw.split('=', 1)
890 shellvars[k] = v
890 shellvars[k] = v
891
891
892 part = bundler.newpart('pushvars')
892 part = bundler.newpart('pushvars')
893
893
894 for key, value in shellvars.iteritems():
894 for key, value in shellvars.iteritems():
895 part.addparam(key, value, mandatory=False)
895 part.addparam(key, value, mandatory=False)
896
896
897 def _pushbundle2(pushop):
897 def _pushbundle2(pushop):
898 """push data to the remote using bundle2
898 """push data to the remote using bundle2
899
899
900 The only currently supported type of data is changegroup but this will
900 The only currently supported type of data is changegroup but this will
901 evolve in the future."""
901 evolve in the future."""
902 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
902 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
903 pushback = (pushop.trmanager
903 pushback = (pushop.trmanager
904 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
904 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
905
905
906 # create reply capability
906 # create reply capability
907 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
907 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
908 allowpushback=pushback))
908 allowpushback=pushback))
909 bundler.newpart('replycaps', data=capsblob)
909 bundler.newpart('replycaps', data=capsblob)
910 replyhandlers = []
910 replyhandlers = []
911 for partgenname in b2partsgenorder:
911 for partgenname in b2partsgenorder:
912 partgen = b2partsgenmapping[partgenname]
912 partgen = b2partsgenmapping[partgenname]
913 ret = partgen(pushop, bundler)
913 ret = partgen(pushop, bundler)
914 if callable(ret):
914 if callable(ret):
915 replyhandlers.append(ret)
915 replyhandlers.append(ret)
916 # do not push if nothing to push
916 # do not push if nothing to push
917 if bundler.nbparts <= 1:
917 if bundler.nbparts <= 1:
918 return
918 return
919 stream = util.chunkbuffer(bundler.getchunks())
919 stream = util.chunkbuffer(bundler.getchunks())
920 try:
920 try:
921 try:
921 try:
922 reply = pushop.remote.unbundle(
922 reply = pushop.remote.unbundle(
923 stream, ['force'], pushop.remote.url())
923 stream, ['force'], pushop.remote.url())
924 except error.BundleValueError as exc:
924 except error.BundleValueError as exc:
925 raise error.Abort(_('missing support for %s') % exc)
925 raise error.Abort(_('missing support for %s') % exc)
926 try:
926 try:
927 trgetter = None
927 trgetter = None
928 if pushback:
928 if pushback:
929 trgetter = pushop.trmanager.transaction
929 trgetter = pushop.trmanager.transaction
930 op = bundle2.processbundle(pushop.repo, reply, trgetter)
930 op = bundle2.processbundle(pushop.repo, reply, trgetter)
931 except error.BundleValueError as exc:
931 except error.BundleValueError as exc:
932 raise error.Abort(_('missing support for %s') % exc)
932 raise error.Abort(_('missing support for %s') % exc)
933 except bundle2.AbortFromPart as exc:
933 except bundle2.AbortFromPart as exc:
934 pushop.ui.status(_('remote: %s\n') % exc)
934 pushop.ui.status(_('remote: %s\n') % exc)
935 if exc.hint is not None:
935 if exc.hint is not None:
936 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
936 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
937 raise error.Abort(_('push failed on remote'))
937 raise error.Abort(_('push failed on remote'))
938 except error.PushkeyFailed as exc:
938 except error.PushkeyFailed as exc:
939 partid = int(exc.partid)
939 partid = int(exc.partid)
940 if partid not in pushop.pkfailcb:
940 if partid not in pushop.pkfailcb:
941 raise
941 raise
942 pushop.pkfailcb[partid](pushop, exc)
942 pushop.pkfailcb[partid](pushop, exc)
943 for rephand in replyhandlers:
943 for rephand in replyhandlers:
944 rephand(op)
944 rephand(op)
945
945
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # push() is expected to have verified this capability already.
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    fastpathok = (pushop.revs is None
                  and not outgoing.excluded
                  and not pushop.repo.changelog.filteredrevs)
    if fastpathok:
        # Everything is being pushed; the fast path has no push race to
        # worry about.
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # Apply the changegroup on the remote.  The local repo found the
    # server's heads and computed what must be pushed; once the revs are
    # transferred, the server aborts if its heads changed meanwhile
    # (commit/push race).
    if pushop.force:
        remoteheads = ['force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
985
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    First applies the phases advertised by the remote onto the local repo,
    then pushes any locally-outdated phase information back via pushkey.
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # publishing server: everything common is public locally too
            _localphasemove(pushop, cheads)
        else: # publish = False
            # non-publishing server: mirror its public heads, keep the
            # rest of the common set draft
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1041
1041
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    trmanager = pushop.trmanager
    if trmanager:
        phases.advanceboundary(pushop.repo,
                               trmanager.transaction(),
                               phase,
                               nodes)
        return
    # The repo is not locked, so we must not change any phases.  Instead,
    # inform the user that phase moves were skipped, when applicable.
    actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
    phasestr = phases.phasenames[phase]
    if actualmoves:
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1058
1058
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug('try to push obsolete markers to remote\n')
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        # reverse sort to ensure we end with dump0
        results = [remote.pushkey('obsolete', key, '', remotedata[key])
                   for key in sorted(remotedata, reverse=True)]
        if not all(results):
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)
1077
1077
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for book, old, new in pushop.outbookmarks:
        # Pick the message pair matching the kind of change being pushed.
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', book, old, new):
            ui.status(bookmsgmap[action][0] % book)
        else:
            ui.warn(bookmsgmap[action][1] % book)
            # discovery can have set the value from an invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
1099
1099
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmarks pulled explicitly (names expanded through the local
        # bookmark store)
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager (set later by pull())
        self.trmanager = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # set of steps already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # True unless bundle1 is forced by config or capabilities.
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # bundle2 capabilities advertised by the remote peer (cached)
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1170
1170
class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction.

    It creates the transaction on demand and calls the appropriate hooks
    when closing the transaction."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            name = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(name)
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
1200
1200
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.

    Raises ``error.Abort`` when the destination lacks features the source
    requires.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)

    # Refuse to pull from a local peer whose requirements we cannot honor.
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        # working-copy lock first, then store lock
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        streamclone.maybeperformlegacystreamclone(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # Each remaining step is a no-op when already handled by bundle2
        # (recorded in pullop.stepsdone).
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop
1258
1258
# list of steps to perform discovery before pull, in execution order
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
1266
1266
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""
    def register(func):
        # refuse duplicate registrations for the same step name
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1282
1282
def _pulldiscovery(pullop):
    """Run all discovery steps"""
    # dispatch in registration order
    for stepname in pulldiscoveryorder:
        pulldiscoverymapping[stepname](pullop)
1288
1288
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1302
1302
1303
1303
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will change to handle all
    discovery at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, put it back in common.
        #
        # This is a hackish solution to catch most of the "common but locally
        # hidden" situations. We do not perform discovery on the unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but is not including a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
        if set(rheads).issubset(set(common)):
            # all remote heads are known locally: nothing to fetch
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1336
1336
1337 def _pullbundle2(pullop):
1337 def _pullbundle2(pullop):
1338 """pull data using bundle2
1338 """pull data using bundle2
1339
1339
1340 For now, the only supported data are changegroup."""
1340 For now, the only supported data are changegroup."""
1341 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1341 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1342
1342
1343 # At the moment we don't do stream clones over bundle2. If that is
1343 # At the moment we don't do stream clones over bundle2. If that is
1344 # implemented then here's where the check for that will go.
1344 # implemented then here's where the check for that will go.
1345 streaming = False
1345 streaming = False
1346
1346
1347 # pulling changegroup
1347 # pulling changegroup
1348 pullop.stepsdone.add('changegroup')
1348 pullop.stepsdone.add('changegroup')
1349
1349
1350 kwargs['common'] = pullop.common
1350 kwargs['common'] = pullop.common
1351 kwargs['heads'] = pullop.heads or pullop.rheads
1351 kwargs['heads'] = pullop.heads or pullop.rheads
1352 kwargs['cg'] = pullop.fetch
1352 kwargs['cg'] = pullop.fetch
1353
1353
1354 ui = pullop.repo.ui
1354 ui = pullop.repo.ui
1355 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1355 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1356 if (not legacyphase and 'heads' in pullop.remotebundle2caps.get('phases')):
1356 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1357 if (not legacyphase and hasbinaryphase):
1357 kwargs['phases'] = True
1358 kwargs['phases'] = True
1358 pullop.stepsdone.add('phases')
1359 pullop.stepsdone.add('phases')
1359
1360
1360 if 'listkeys' in pullop.remotebundle2caps:
1361 if 'listkeys' in pullop.remotebundle2caps:
1361 if 'phases' not in pullop.stepsdone:
1362 if 'phases' not in pullop.stepsdone:
1362 kwargs['listkeys'] = ['phases']
1363 kwargs['listkeys'] = ['phases']
1363 if pullop.remotebookmarks is None:
1364 if pullop.remotebookmarks is None:
1364 # make sure to always includes bookmark data when migrating
1365 # make sure to always includes bookmark data when migrating
1365 # `hg incoming --bundle` to using this function.
1366 # `hg incoming --bundle` to using this function.
1366 kwargs.setdefault('listkeys', []).append('bookmarks')
1367 kwargs.setdefault('listkeys', []).append('bookmarks')
1367
1368
1368 # If this is a full pull / clone and the server supports the clone bundles
1369 # If this is a full pull / clone and the server supports the clone bundles
1369 # feature, tell the server whether we attempted a clone bundle. The
1370 # feature, tell the server whether we attempted a clone bundle. The
1370 # presence of this flag indicates the client supports clone bundles. This
1371 # presence of this flag indicates the client supports clone bundles. This
1371 # will enable the server to treat clients that support clone bundles
1372 # will enable the server to treat clients that support clone bundles
1372 # differently from those that don't.
1373 # differently from those that don't.
1373 if (pullop.remote.capable('clonebundles')
1374 if (pullop.remote.capable('clonebundles')
1374 and pullop.heads is None and list(pullop.common) == [nullid]):
1375 and pullop.heads is None and list(pullop.common) == [nullid]):
1375 kwargs['cbattempted'] = pullop.clonebundleattempted
1376 kwargs['cbattempted'] = pullop.clonebundleattempted
1376
1377
1377 if streaming:
1378 if streaming:
1378 pullop.repo.ui.status(_('streaming all changes\n'))
1379 pullop.repo.ui.status(_('streaming all changes\n'))
1379 elif not pullop.fetch:
1380 elif not pullop.fetch:
1380 pullop.repo.ui.status(_("no changes found\n"))
1381 pullop.repo.ui.status(_("no changes found\n"))
1381 pullop.cgresult = 0
1382 pullop.cgresult = 0
1382 else:
1383 else:
1383 if pullop.heads is None and list(pullop.common) == [nullid]:
1384 if pullop.heads is None and list(pullop.common) == [nullid]:
1384 pullop.repo.ui.status(_("requesting all changes\n"))
1385 pullop.repo.ui.status(_("requesting all changes\n"))
1385 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1386 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1386 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1387 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1387 if obsolete.commonversion(remoteversions) is not None:
1388 if obsolete.commonversion(remoteversions) is not None:
1388 kwargs['obsmarkers'] = True
1389 kwargs['obsmarkers'] = True
1389 pullop.stepsdone.add('obsmarkers')
1390 pullop.stepsdone.add('obsmarkers')
1390 _pullbundle2extraprepare(pullop, kwargs)
1391 _pullbundle2extraprepare(pullop, kwargs)
1391 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1392 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1392 try:
1393 try:
1393 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1394 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1394 except bundle2.AbortFromPart as exc:
1395 except bundle2.AbortFromPart as exc:
1395 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1396 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1396 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1397 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1397 except error.BundleValueError as exc:
1398 except error.BundleValueError as exc:
1398 raise error.Abort(_('missing support for %s') % exc)
1399 raise error.Abort(_('missing support for %s') % exc)
1399
1400
1400 if pullop.fetch:
1401 if pullop.fetch:
1401 pullop.cgresult = bundle2.combinechangegroupresults(op)
1402 pullop.cgresult = bundle2.combinechangegroupresults(op)
1402
1403
1403 # processing phases change
1404 # processing phases change
1404 for namespace, value in op.records['listkeys']:
1405 for namespace, value in op.records['listkeys']:
1405 if namespace == 'phases':
1406 if namespace == 'phases':
1406 _pullapplyphases(pullop, value)
1407 _pullapplyphases(pullop, value)
1407
1408
1408 # processing bookmark update
1409 # processing bookmark update
1409 for namespace, value in op.records['listkeys']:
1410 for namespace, value in op.records['listkeys']:
1410 if namespace == 'bookmarks':
1411 if namespace == 'bookmarks':
1411 pullop.remotebookmarks = value
1412 pullop.remotebookmarks = value
1412
1413
1413 # bookmark data were either already there or pulled in the bundle
1414 # bookmark data were either already there or pulled in the bundle
1414 if pullop.remotebookmarks is not None:
1415 if pullop.remotebookmarks is not None:
1415 _pullbookmarks(pullop)
1416 _pullbookmarks(pullop)
1416
1417
1417 def _pullbundle2extraprepare(pullop, kwargs):
1418 def _pullbundle2extraprepare(pullop, kwargs):
1418 """hook function so that extensions can extend the getbundle call"""
1419 """hook function so that extensions can extend the getbundle call"""
1419 pass
1420 pass
1420
1421
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # The transaction is opened as late as possible: opening one for
    # nothing would clobber a future useful rollback call.
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    txn = pullop.gettransaction()
    fullpull = pullop.heads is None and list(pullop.common) == [nullid]
    if fullpull:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # pick the most capable protocol the remote offers
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cgdata = pullop.remote.getbundle('pull', common=pullop.common,
                                         heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cgdata = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cgdata = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads,
                                                 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cgdata, txn, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1455
1456
def _pullphase(pullop):
    """Fetch phase information from the remote and apply it locally."""
    if 'phases' not in pullop.stepsdone:
        remotephases = pullop.remote.listkeys('phases')
        _pullapplyphases(pullop, remotephases)
1462
1463
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # Remote is recent and non-publishing: trust its per-root
        # phase information.
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing: all common changesets should be
        # seen as public.
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    getphase = unfi._phasecache.phase
    getrev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # only advance changesets that are not already public locally
    pheads = [n for n in pheads if getphase(unfi, getrev(n)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # only advance changesets that are not already draft locally
    dheads = [n for n in dheads if getphase(unfi, getrev(n)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1497
1498
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    # remote bookmarks arrive hex-encoded; decode before applying
    remotemarks = bookmod.unhexlifybookmarks(pullop.remotebookmarks)
    bookmod.updatefromremote(repo.ui, repo, remotemarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1510
1511
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            # markers are spread over 'dump*' pushkey entries
            for dumpkey in sorted(remoteobs, reverse=True):
                if not dumpkey.startswith('dump'):
                    continue
                blob = util.b85decode(remoteobs[dumpkey])
                version, newmarks = obsolete._readmarkers(blob)
                markers.extend(newmarks)
            if markers:
                pullop.repo.obsstore.add(tr, markers)
        pullop.repo.invalidatevolatilesets()
    return tr
1538
1539
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1545
1546
# Ordered list of step names run when building a bundle2 for getbundle.
# The order is significant: parts are emitted in this sequence.
getbundle2partsorder = []

# Step name -> part-generating function.
#
# Kept as a plain dict so extensions can wrap individual steps.
getbundle2partsmapping = {}
1553
1554
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    Registers the decorated function in the step -> function mapping and
    appends the step name to the ordered step list (or inserts it at
    ``idx`` when given).  Registration order matters for part emission.

    Only use this decorator for new steps; to wrap an existing step from
    an extension, modify the getbundle2partsmapping dictionary directly.
    """
    def _register(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return _register
1572
1573
def bundle2requested(bundlecaps):
    """Tell whether *bundlecaps* advertises any bundle2 (HG2x) capability."""
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1577
1578
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    if not bundle2requested(bundlecaps):
        # bundle10 (changegroup 01) case
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        return changegroup.makestream(repo, outgoing, '01', source,
                                      bundlecaps=bundlecaps)

    # bundle20 case: decode the client's advertised bundle2 capabilities
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # run every registered part generator, in registration order
    for stepname in getbundle2partsorder:
        partgen = getbundle2partsmapping[stepname]
        partgen(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
                **pycompat.strkwargs(kwargs))

    return bundler.getchunks()
1618
1619
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    if not kwargs.get('cg', True):
        return
    # negotiate the changegroup version to emit
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        usable = [v for v in cgversions
                  if v in changegroup.supportedoutgoingversions(repo)]
        if not usable:
            raise ValueError(_('no common changegroup version'))
        version = max(usable)
    outgoing = _computeoutgoing(repo, heads, common)
    if not outgoing.missing:
        return
    cgstream = changegroup.makestream(repo, outgoing, version, source,
                                      bundlecaps=bundlecaps)

    part = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        part.addparam('version', version)
    part.addparam('nbchanges', '%d' % len(outgoing.missing),
                  mandatory=False)
    if 'treemanifest' in repo.requirements:
        part.addparam('treemanifest', '1')
1647
1648
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    # one part per requested pushkey namespace
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1658
1659
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # only markers relevant to the exchanged changesets are sent
    subset = [ctx.node() for ctx in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1670
1671
@getbundle2partsgenerator('phases')
def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, **kwargs):
    """add phase heads part to the requested bundle

    When the client requested phase data (``kwargs['phases']``), encode the
    phases of the outgoing heads into a binary 'phase-heads' part.  Raises
    ValueError when the client supports no compatible phase exchange method.
    """
    if kwargs.get('phases', False):
        # Use a default of () so a client lacking any 'phases' capability
        # gets the intended ValueError rather than a TypeError from
        # membership-testing None (mirrors the pull-side
        # remotebundle2caps.get('phases', ()) check).
        if 'heads' not in b2caps.get('phases', ()):
            raise ValueError(_('no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            # publishing repository: everything served is public
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phase for now
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = 'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        phasemapping = []
        for phase in phases.allphases:
            phasemapping.append(sorted(headsbyphase[phase]))

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart('phase-heads', data=phasedata)
1717
1718
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Emit only when changesets are being exchanged and the client
    # supports the part.
    if kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps:
        outgoing = _computeoutgoing(repo, heads, common)
        bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1737
1738
def _getbookmarks(repo, **kwargs):
    """Returns bookmark to node mapping.

    Primarily used to generate the `bookmarks` bundle2 part.  Kept as a
    separate function so extensions can wrap it easily; the ``kwargs``
    pass-through lets extensions add parameters without changing callers.
    """
    return {name: node for name, node in bookmod.listbinbookmarks(repo)}
1748
1749
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
    unchanged = (their_heads == ['force']
                 or their_heads == heads
                 or their_heads == ['hashed', heads_hash])
    if not unchanged:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1762
1763
1763 def unbundle(repo, cg, heads, source, url):
1764 def unbundle(repo, cg, heads, source, url):
1764 """Apply a bundle to a repo.
1765 """Apply a bundle to a repo.
1765
1766
1766 this function makes sure the repo is locked during the application and have
1767 this function makes sure the repo is locked during the application and have
1767 mechanism to check that no push race occurred between the creation of the
1768 mechanism to check that no push race occurred between the creation of the
1768 bundle and its application.
1769 bundle and its application.
1769
1770
1770 If the push was raced as PushRaced exception is raised."""
1771 If the push was raced as PushRaced exception is raised."""
1771 r = 0
1772 r = 0
1772 # need a transaction when processing a bundle2 stream
1773 # need a transaction when processing a bundle2 stream
1773 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1774 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
1774 lockandtr = [None, None, None]
1775 lockandtr = [None, None, None]
1775 recordout = None
1776 recordout = None
1776 # quick fix for output mismatch with bundle2 in 3.4
1777 # quick fix for output mismatch with bundle2 in 3.4
1777 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
1778 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
1778 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1779 if url.startswith('remote:http:') or url.startswith('remote:https:'):
1779 captureoutput = True
1780 captureoutput = True
1780 try:
1781 try:
1781 # note: outside bundle1, 'heads' is expected to be empty and this
1782 # note: outside bundle1, 'heads' is expected to be empty and this
1782 # 'check_heads' call wil be a no-op
1783 # 'check_heads' call wil be a no-op
1783 check_heads(repo, heads, 'uploading changes')
1784 check_heads(repo, heads, 'uploading changes')
1784 # push can proceed
1785 # push can proceed
1785 if not isinstance(cg, bundle2.unbundle20):
1786 if not isinstance(cg, bundle2.unbundle20):
1786 # legacy case: bundle1 (changegroup 01)
1787 # legacy case: bundle1 (changegroup 01)
1787 txnname = "\n".join([source, util.hidepassword(url)])
1788 txnname = "\n".join([source, util.hidepassword(url)])
1788 with repo.lock(), repo.transaction(txnname) as tr:
1789 with repo.lock(), repo.transaction(txnname) as tr:
1789 op = bundle2.applybundle(repo, cg, tr, source, url)
1790 op = bundle2.applybundle(repo, cg, tr, source, url)
1790 r = bundle2.combinechangegroupresults(op)
1791 r = bundle2.combinechangegroupresults(op)
1791 else:
1792 else:
1792 r = None
1793 r = None
1793 try:
1794 try:
1794 def gettransaction():
1795 def gettransaction():
1795 if not lockandtr[2]:
1796 if not lockandtr[2]:
1796 lockandtr[0] = repo.wlock()
1797 lockandtr[0] = repo.wlock()
1797 lockandtr[1] = repo.lock()
1798 lockandtr[1] = repo.lock()
1798 lockandtr[2] = repo.transaction(source)
1799 lockandtr[2] = repo.transaction(source)
1799 lockandtr[2].hookargs['source'] = source
1800 lockandtr[2].hookargs['source'] = source
1800 lockandtr[2].hookargs['url'] = url
1801 lockandtr[2].hookargs['url'] = url
1801 lockandtr[2].hookargs['bundle2'] = '1'
1802 lockandtr[2].hookargs['bundle2'] = '1'
1802 return lockandtr[2]
1803 return lockandtr[2]
1803
1804
1804 # Do greedy locking by default until we're satisfied with lazy
1805 # Do greedy locking by default until we're satisfied with lazy
1805 # locking.
1806 # locking.
1806 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1807 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
1807 gettransaction()
1808 gettransaction()
1808
1809
1809 op = bundle2.bundleoperation(repo, gettransaction,
1810 op = bundle2.bundleoperation(repo, gettransaction,
1810 captureoutput=captureoutput)
1811 captureoutput=captureoutput)
1811 try:
1812 try:
1812 op = bundle2.processbundle(repo, cg, op=op)
1813 op = bundle2.processbundle(repo, cg, op=op)
1813 finally:
1814 finally:
1814 r = op.reply
1815 r = op.reply
1815 if captureoutput and r is not None:
1816 if captureoutput and r is not None:
1816 repo.ui.pushbuffer(error=True, subproc=True)
1817 repo.ui.pushbuffer(error=True, subproc=True)
1817 def recordout(output):
1818 def recordout(output):
1818 r.newpart('output', data=output, mandatory=False)
1819 r.newpart('output', data=output, mandatory=False)
1819 if lockandtr[2] is not None:
1820 if lockandtr[2] is not None:
1820 lockandtr[2].close()
1821 lockandtr[2].close()
1821 except BaseException as exc:
1822 except BaseException as exc:
1822 exc.duringunbundle2 = True
1823 exc.duringunbundle2 = True
1823 if captureoutput and r is not None:
1824 if captureoutput and r is not None:
1824 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1825 parts = exc._bundle2salvagedoutput = r.salvageoutput()
1825 def recordout(output):
1826 def recordout(output):
1826 part = bundle2.bundlepart('output', data=output,
1827 part = bundle2.bundlepart('output', data=output,
1827 mandatory=False)
1828 mandatory=False)
1828 parts.append(part)
1829 parts.append(part)
1829 raise
1830 raise
1830 finally:
1831 finally:
1831 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1832 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
1832 if recordout is not None:
1833 if recordout is not None:
1833 recordout(repo.ui.popbuffer())
1834 recordout(repo.ui.popbuffer())
1834 return r
1835 return r
1835
1836
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible.

    Called with a pull operation; silently returns without doing anything
    when clone bundles are disabled or not applicable.  Aborts (by default)
    when a bundle was selected but failed to apply, to protect the server
    from the full clone load it was trying to offload.
    """

    repo = pullop.repo
    remote = pullop.remote

    # Feature switch: client-side opt-out.
    if not repo.ui.configbool('ui', 'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    # A heads-limited pull is not a full clone; don't use a clone bundle.
    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    res = remote._call('clonebundles')

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    # Drop entries this client cannot apply (unsupported spec, SNI, or
    # non-streamclone entries when a stream clone was requested).
    entries = filterclonebundleentries(
        repo, entries, streamclonerequested=pullop.streamclonerequested)

    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    # Order by the user's ui.clonebundleprefers; the first entry wins.
    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
1901
1902
def parseclonebundlesmanifest(repo, s):
    """Parse the raw text of a clone bundles manifest.

    Each non-empty line describes one bundle: a URL followed by optional
    whitespace-separated, percent-encoded ``KEY=VALUE`` attributes.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue

        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(
                        repo, value, externalnames=True)
                except error.InvalidBundleSpecification:
                    pass
                except error.UnsupportedBundleSpecification:
                    pass
                else:
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version

        entries.append(attrs)

    return entries
1937
1938
def filterclonebundleentries(repo, entries, streamclonerequested=False):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    accepted = []
    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                comp, version, params = parsebundlespec(repo, spec,
                                                        strict=True)
            except error.InvalidBundleSpecification as e:
                repo.ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (entry['URL'], str(e)))
                continue
            # If a stream clone was requested, filter out non-streamclone
            # entries.
            if streamclonerequested and (comp != 'UN' or version != 's1'):
                repo.ui.debug('filtering %s because not a stream clone\n' %
                              entry['URL'])
                continue
        # If we don't have a spec and requested a stream clone, we don't know
        # what the entry is so don't attempt to apply it.
        elif streamclonerequested:
            repo.ui.debug('filtering %s because cannot determine if a stream '
                          'clone bundle\n' % entry['URL'])
            continue

        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        accepted.append(entry)

    return accepted
1984
1985
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk preferences in order; the first attribute that discriminates
        # between the two entries decides the ordering.
        for prefkey, prefvalue in self.prefers:
            ours = self.value.get(prefkey)
            theirs = other.value.get(prefkey)

            # An exact preference match beats an absent attribute on the
            # other side (and vice versa).
            if theirs is None and ours is not None and ours == prefvalue:
                return -1
            if ours is None and theirs is not None and theirs == prefvalue:
                return 1

            # The attribute must be present on both sides to compare further.
            if ours is None or theirs is None:
                continue

            # Identical values don't discriminate; try the next preference.
            if ours == theirs:
                continue

            # Exact preference matches sort first.
            if ours == prefvalue:
                return -1
            if theirs == prefvalue:
                return 1

        # No preference discriminated. Fall back to original (index) order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0
2048
2049
def sortclonebundleentries(ui, entries):
    """Sort clone bundle manifest entries by the user's preferences.

    Preferences come from the ``ui.clonebundleprefers`` config list, each
    item of the form ``ATTR=VALUE``.  With no preferences configured, the
    server-provided order is preserved.
    """
    raw = ui.configlist('ui', 'clonebundleprefers')
    if not raw:
        return list(entries)

    # Split each "ATTR=VALUE" preference into an (attr, value) pair.
    prefpairs = [p.split('=', 1) for p in raw]

    wrapped = [clonebundleentry(entry, prefpairs) for entry in entries]
    wrapped.sort()
    return [w.value for w in wrapped]
2058
2059
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL.

    Fetches ``url``, reads it as a bundle, and applies it to ``repo`` inside
    a single lock/transaction.  Returns True on success; returns False after
    warning the user when the fetch fails with an HTTP or URL error.  Other
    exceptions (e.g. from bundle application) propagate and roll back the
    transaction.
    """
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            fh = urlmod.open(ui, url)
            cg = readbundle(ui, fh, 'stream')

            # Stream clone bundles bypass the changegroup machinery and are
            # applied directly; everything else goes through bundle2.
            if isinstance(cg, streamclone.streamcloneapplier):
                cg.apply(repo)
            else:
                bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') % e.reason)

        return False
General Comments 0
You need to be logged in to leave comments. Login now