##// END OF EJS Templates
exchange: hit opargs with pycompat.strkwargs before **-ing it...
Augie Fackler -
r34217:e3cd7242 default
parent child Browse files
Show More
@@ -1,2012 +1,2012 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 bookmarks as bookmod,
19 bookmarks as bookmod,
20 bundle2,
20 bundle2,
21 changegroup,
21 changegroup,
22 discovery,
22 discovery,
23 error,
23 error,
24 lock as lockmod,
24 lock as lockmod,
25 obsolete,
25 obsolete,
26 phases,
26 phases,
27 pushkey,
27 pushkey,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 url as urlmod,
32 url as urlmod,
33 util,
33 util,
34 )
34 )
35
35
# Convenience aliases for the py2/py3 url request/error modules exposed by
# util.
urlerr = util.urlerr
urlreq = util.urlreq

# Maps bundle version human names to changegroup versions.
_bundlespeccgversions = {'v1': '01',
                         'v2': '02',
                         'packed1': 's1',
                         'bundle2': '02', #legacy
                        }

# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
48
48
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>[;k=v...]" into (version, params dict); keys and
        # values are URI decoded.
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
                _('invalid bundle specification; '
                  'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-specified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                    _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                  ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Translate human-centric names into their internal representation.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
170
170
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the 4-byte magic on ``fh`` and return the matching unbundler.

    ``fname`` is only used for error messages; a falsy value means an
    anonymous stream. When ``vfs`` is provided, ``fname`` is joined to the
    vfs root for display purposes. Raises ``error.Abort`` for streams that
    are not Mercurial bundles or carry an unknown version.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A NUL-leading headerless stream is treated as a raw, uncompressed
        # cg1 changegroup: push the consumed bytes back and fake an HG10
        # header.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = header[0:2]
    version = header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        # v1 bundles carry a 2-byte compression tag after the magic, unless
        # we already forced 'UN' above.
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
198
198
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal compression type to its bundlespec name; None when
        # no engine is registered for that type.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # NOTE(review): presumably the unpacker's marker for a bzip2
            # stream whose header bytes were already consumed — verify
            # against changegroup.cg1unpacker.
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
251
251
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if not common:
        common = [nullid]
    else:
        # Drop alleged-common nodes the local changelog does not know about.
        hasnode = cl.hasnode
        common = [n for n in common if hasnode(n)]
    if not heads:
        heads = cl.heads()
    return discovery.outgoing(repo, common, heads)
270
270
271 def _forcebundle1(op):
271 def _forcebundle1(op):
272 """return true if a pull/push must use bundle1
272 """return true if a pull/push must use bundle1
273
273
274 This function is used to allow testing of the older bundle version"""
274 This function is used to allow testing of the older bundle version"""
275 ui = op.repo.ui
275 ui = op.repo.ui
276 forcebundle1 = False
276 forcebundle1 = False
277 # The goal is this config is to allow developer to choose the bundle
277 # The goal is this config is to allow developer to choose the bundle
278 # version used during exchanged. This is especially handy during test.
278 # version used during exchanged. This is especially handy during test.
279 # Value is a list of bundle version to be picked from, highest version
279 # Value is a list of bundle version to be picked from, highest version
280 # should be used.
280 # should be used.
281 #
281 #
282 # developer config: devel.legacy.exchange
282 # developer config: devel.legacy.exchange
283 exchange = ui.configlist('devel', 'legacy.exchange')
283 exchange = ui.configlist('devel', 'legacy.exchange')
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
285 return forcebundle1 or not op.remote.capable('bundle2')
285 return forcebundle1 or not op.remote.capable('bundle2')
286
286
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

# mapping of message used when pushing bookmark
# key -> (success message, failure message), both taking the bookmark name
bookmsgmap = {'update': (_("updating bookmark %s\n"),
                         _('updating bookmark %s failed!\n')),
              'export': (_("exporting bookmark %s\n"),
                         _('exporting bookmark %s failed!\n')),
              'delete': (_("deleting remote bookmark %s\n"),
                         _('deleting remote bookmark %s failed!\n')),
              }
411
411
412
412
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    # opargs may carry bytes keys; ** expansion requires native str keys on
    # Python 3, hence the pycompat.strkwargs conversion.
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
477
477
# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
485
485
486 def pushdiscovery(stepname):
486 def pushdiscovery(stepname):
487 """decorator for function performing discovery before push
487 """decorator for function performing discovery before push
488
488
489 The function is added to the step -> function mapping and appended to the
489 The function is added to the step -> function mapping and appended to the
490 list of steps. Beware that decorated function will be added in order (this
490 list of steps. Beware that decorated function will be added in order (this
491 may matter).
491 may matter).
492
492
493 You can only use this decorator for a new step, if you want to wrap a step
493 You can only use this decorator for a new step, if you want to wrap a step
494 from an extension, change the pushdiscovery dictionary directly."""
494 from an extension, change the pushdiscovery dictionary directly."""
495 def dec(func):
495 def dec(func):
496 assert stepname not in pushdiscoverymapping
496 assert stepname not in pushdiscoverymapping
497 pushdiscoverymapping[stepname] = func
497 pushdiscoverymapping[stepname] = func
498 pushdiscoveryorder.append(stepname)
498 pushdiscoveryorder.append(stepname)
499 return func
499 return func
500 return dec
500 return dec
501
501
502 def _pushdiscovery(pushop):
502 def _pushdiscovery(pushop):
503 """Run all discovery steps"""
503 """Run all discovery steps"""
504 for stepname in pushdiscoveryorder:
504 for stepname in pushdiscoveryorder:
505 step = pushdiscoverymapping[stepname]
505 step = pushdiscoverymapping[stepname]
506 step(pushop)
506 step(pushop)
507
507
508 @pushdiscovery('changeset')
508 @pushdiscovery('changeset')
509 def _pushdiscoverychangeset(pushop):
509 def _pushdiscoverychangeset(pushop):
510 """discover the changeset that need to be pushed"""
510 """discover the changeset that need to be pushed"""
511 fci = discovery.findcommonincoming
511 fci = discovery.findcommonincoming
512 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
512 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
513 common, inc, remoteheads = commoninc
513 common, inc, remoteheads = commoninc
514 fco = discovery.findcommonoutgoing
514 fco = discovery.findcommonoutgoing
515 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
515 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
516 commoninc=commoninc, force=pushop.force)
516 commoninc=commoninc, force=pushop.force)
517 pushop.outgoing = outgoing
517 pushop.outgoing = outgoing
518 pushop.remoteheads = remoteheads
518 pushop.remoteheads = remoteheads
519 pushop.incoming = inc
519 pushop.incoming = inc
520
520
521 @pushdiscovery('phase')
521 @pushdiscovery('phase')
522 def _pushdiscoveryphase(pushop):
522 def _pushdiscoveryphase(pushop):
523 """discover the phase that needs to be pushed
523 """discover the phase that needs to be pushed
524
524
525 (computed for both success and failure case for changesets push)"""
525 (computed for both success and failure case for changesets push)"""
526 outgoing = pushop.outgoing
526 outgoing = pushop.outgoing
527 unfi = pushop.repo.unfiltered()
527 unfi = pushop.repo.unfiltered()
528 remotephases = pushop.remote.listkeys('phases')
528 remotephases = pushop.remote.listkeys('phases')
529 publishing = remotephases.get('publishing', False)
529 publishing = remotephases.get('publishing', False)
530 if (pushop.ui.configbool('ui', '_usedassubrepo')
530 if (pushop.ui.configbool('ui', '_usedassubrepo')
531 and remotephases # server supports phases
531 and remotephases # server supports phases
532 and not pushop.outgoing.missing # no changesets to be pushed
532 and not pushop.outgoing.missing # no changesets to be pushed
533 and publishing):
533 and publishing):
534 # When:
534 # When:
535 # - this is a subrepo push
535 # - this is a subrepo push
536 # - and remote support phase
536 # - and remote support phase
537 # - and no changeset are to be pushed
537 # - and no changeset are to be pushed
538 # - and remote is publishing
538 # - and remote is publishing
539 # We may be in issue 3871 case!
539 # We may be in issue 3871 case!
540 # We drop the possible phase synchronisation done by
540 # We drop the possible phase synchronisation done by
541 # courtesy to publish changesets possibly locally draft
541 # courtesy to publish changesets possibly locally draft
542 # on the remote.
542 # on the remote.
543 remotephases = {'publishing': 'True'}
543 remotephases = {'publishing': 'True'}
544 ana = phases.analyzeremotephases(pushop.repo,
544 ana = phases.analyzeremotephases(pushop.repo,
545 pushop.fallbackheads,
545 pushop.fallbackheads,
546 remotephases)
546 remotephases)
547 pheads, droots = ana
547 pheads, droots = ana
548 extracond = ''
548 extracond = ''
549 if not publishing:
549 if not publishing:
550 extracond = ' and public()'
550 extracond = ' and public()'
551 revset = 'heads((%%ln::%%ln) %s)' % extracond
551 revset = 'heads((%%ln::%%ln) %s)' % extracond
552 # Get the list of all revs draft on remote by public here.
552 # Get the list of all revs draft on remote by public here.
553 # XXX Beware that revset break if droots is not strictly
553 # XXX Beware that revset break if droots is not strictly
554 # XXX root we may want to ensure it is but it is costly
554 # XXX root we may want to ensure it is but it is costly
555 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
555 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
556 if not outgoing.missing:
556 if not outgoing.missing:
557 future = fallback
557 future = fallback
558 else:
558 else:
559 # adds changeset we are going to push as draft
559 # adds changeset we are going to push as draft
560 #
560 #
561 # should not be necessary for publishing server, but because of an
561 # should not be necessary for publishing server, but because of an
562 # issue fixed in xxxxx we have to do it anyway.
562 # issue fixed in xxxxx we have to do it anyway.
563 fdroots = list(unfi.set('roots(%ln + %ln::)',
563 fdroots = list(unfi.set('roots(%ln + %ln::)',
564 outgoing.missing, droots))
564 outgoing.missing, droots))
565 fdroots = [f.node() for f in fdroots]
565 fdroots = [f.node() for f in fdroots]
566 future = list(unfi.set(revset, fdroots, pushop.futureheads))
566 future = list(unfi.set(revset, fdroots, pushop.futureheads))
567 pushop.outdatedphases = future
567 pushop.outdatedphases = future
568 pushop.fallbackoutdatedphases = fallback
568 pushop.fallbackoutdatedphases = fallback
569
569
570 @pushdiscovery('obsmarker')
570 @pushdiscovery('obsmarker')
571 def _pushdiscoveryobsmarkers(pushop):
571 def _pushdiscoveryobsmarkers(pushop):
572 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
572 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
573 and pushop.repo.obsstore
573 and pushop.repo.obsstore
574 and 'obsolete' in pushop.remote.listkeys('namespaces')):
574 and 'obsolete' in pushop.remote.listkeys('namespaces')):
575 repo = pushop.repo
575 repo = pushop.repo
576 # very naive computation, that can be quite expensive on big repo.
576 # very naive computation, that can be quite expensive on big repo.
577 # However: evolution is currently slow on them anyway.
577 # However: evolution is currently slow on them anyway.
578 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
578 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
579 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
579 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
580
580
581 @pushdiscovery('bookmarks')
581 @pushdiscovery('bookmarks')
582 def _pushdiscoverybookmarks(pushop):
582 def _pushdiscoverybookmarks(pushop):
583 ui = pushop.ui
583 ui = pushop.ui
584 repo = pushop.repo.unfiltered()
584 repo = pushop.repo.unfiltered()
585 remote = pushop.remote
585 remote = pushop.remote
586 ui.debug("checking for updated bookmarks\n")
586 ui.debug("checking for updated bookmarks\n")
587 ancestors = ()
587 ancestors = ()
588 if pushop.revs:
588 if pushop.revs:
589 revnums = map(repo.changelog.rev, pushop.revs)
589 revnums = map(repo.changelog.rev, pushop.revs)
590 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
590 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
591 remotebookmark = remote.listkeys('bookmarks')
591 remotebookmark = remote.listkeys('bookmarks')
592
592
593 explicit = set([repo._bookmarks.expandname(bookmark)
593 explicit = set([repo._bookmarks.expandname(bookmark)
594 for bookmark in pushop.bookmarks])
594 for bookmark in pushop.bookmarks])
595
595
596 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
596 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
597 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
597 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
598
598
599 def safehex(x):
599 def safehex(x):
600 if x is None:
600 if x is None:
601 return x
601 return x
602 return hex(x)
602 return hex(x)
603
603
604 def hexifycompbookmarks(bookmarks):
604 def hexifycompbookmarks(bookmarks):
605 for b, scid, dcid in bookmarks:
605 for b, scid, dcid in bookmarks:
606 yield b, safehex(scid), safehex(dcid)
606 yield b, safehex(scid), safehex(dcid)
607
607
608 comp = [hexifycompbookmarks(marks) for marks in comp]
608 comp = [hexifycompbookmarks(marks) for marks in comp]
609 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
609 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
610
610
611 for b, scid, dcid in advsrc:
611 for b, scid, dcid in advsrc:
612 if b in explicit:
612 if b in explicit:
613 explicit.remove(b)
613 explicit.remove(b)
614 if not ancestors or repo[scid].rev() in ancestors:
614 if not ancestors or repo[scid].rev() in ancestors:
615 pushop.outbookmarks.append((b, dcid, scid))
615 pushop.outbookmarks.append((b, dcid, scid))
616 # search added bookmark
616 # search added bookmark
617 for b, scid, dcid in addsrc:
617 for b, scid, dcid in addsrc:
618 if b in explicit:
618 if b in explicit:
619 explicit.remove(b)
619 explicit.remove(b)
620 pushop.outbookmarks.append((b, '', scid))
620 pushop.outbookmarks.append((b, '', scid))
621 # search for overwritten bookmark
621 # search for overwritten bookmark
622 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
622 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
623 if b in explicit:
623 if b in explicit:
624 explicit.remove(b)
624 explicit.remove(b)
625 pushop.outbookmarks.append((b, dcid, scid))
625 pushop.outbookmarks.append((b, dcid, scid))
626 # search for bookmark to delete
626 # search for bookmark to delete
627 for b, scid, dcid in adddst:
627 for b, scid, dcid in adddst:
628 if b in explicit:
628 if b in explicit:
629 explicit.remove(b)
629 explicit.remove(b)
630 # treat as "deleted locally"
630 # treat as "deleted locally"
631 pushop.outbookmarks.append((b, dcid, ''))
631 pushop.outbookmarks.append((b, dcid, ''))
632 # identical bookmarks shouldn't get reported
632 # identical bookmarks shouldn't get reported
633 for b, scid, dcid in same:
633 for b, scid, dcid in same:
634 if b in explicit:
634 if b in explicit:
635 explicit.remove(b)
635 explicit.remove(b)
636
636
637 if explicit:
637 if explicit:
638 explicit = sorted(explicit)
638 explicit = sorted(explicit)
639 # we should probably list all of them
639 # we should probably list all of them
640 ui.warn(_('bookmark %s does not exist on the local '
640 ui.warn(_('bookmark %s does not exist on the local '
641 'or remote repository!\n') % explicit[0])
641 'or remote repository!\n') % explicit[0])
642 pushop.bkresult = 2
642 pushop.bkresult = 2
643
643
644 pushop.outbookmarks.sort()
644 pushop.outbookmarks.sort()
645
645
646 def _pushcheckoutgoing(pushop):
646 def _pushcheckoutgoing(pushop):
647 outgoing = pushop.outgoing
647 outgoing = pushop.outgoing
648 unfi = pushop.repo.unfiltered()
648 unfi = pushop.repo.unfiltered()
649 if not outgoing.missing:
649 if not outgoing.missing:
650 # nothing to push
650 # nothing to push
651 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
651 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
652 return False
652 return False
653 # something to push
653 # something to push
654 if not pushop.force:
654 if not pushop.force:
655 # if repo.obsstore == False --> no obsolete
655 # if repo.obsstore == False --> no obsolete
656 # then, save the iteration
656 # then, save the iteration
657 if unfi.obsstore:
657 if unfi.obsstore:
658 # this message are here for 80 char limit reason
658 # this message are here for 80 char limit reason
659 mso = _("push includes obsolete changeset: %s!")
659 mso = _("push includes obsolete changeset: %s!")
660 mspd = _("push includes phase-divergent changeset: %s!")
660 mspd = _("push includes phase-divergent changeset: %s!")
661 mscd = _("push includes content-divergent changeset: %s!")
661 mscd = _("push includes content-divergent changeset: %s!")
662 mst = {"orphan": _("push includes orphan changeset: %s!"),
662 mst = {"orphan": _("push includes orphan changeset: %s!"),
663 "phase-divergent": mspd,
663 "phase-divergent": mspd,
664 "content-divergent": mscd}
664 "content-divergent": mscd}
665 # If we are to push if there is at least one
665 # If we are to push if there is at least one
666 # obsolete or unstable changeset in missing, at
666 # obsolete or unstable changeset in missing, at
667 # least one of the missinghead will be obsolete or
667 # least one of the missinghead will be obsolete or
668 # unstable. So checking heads only is ok
668 # unstable. So checking heads only is ok
669 for node in outgoing.missingheads:
669 for node in outgoing.missingheads:
670 ctx = unfi[node]
670 ctx = unfi[node]
671 if ctx.obsolete():
671 if ctx.obsolete():
672 raise error.Abort(mso % ctx)
672 raise error.Abort(mso % ctx)
673 elif ctx.isunstable():
673 elif ctx.isunstable():
674 # TODO print more than one instability in the abort
674 # TODO print more than one instability in the abort
675 # message
675 # message
676 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
676 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
677
677
678 discovery.checkheads(pushop)
678 discovery.checkheads(pushop)
679 return True
679 return True
680
680
681 # List of names of steps to perform for an outgoing bundle2, order matters.
681 # List of names of steps to perform for an outgoing bundle2, order matters.
682 b2partsgenorder = []
682 b2partsgenorder = []
683
683
684 # Mapping between step name and function
684 # Mapping between step name and function
685 #
685 #
686 # This exists to help extensions wrap steps if necessary
686 # This exists to help extensions wrap steps if necessary
687 b2partsgenmapping = {}
687 b2partsgenmapping = {}
688
688
689 def b2partsgenerator(stepname, idx=None):
689 def b2partsgenerator(stepname, idx=None):
690 """decorator for function generating bundle2 part
690 """decorator for function generating bundle2 part
691
691
692 The function is added to the step -> function mapping and appended to the
692 The function is added to the step -> function mapping and appended to the
693 list of steps. Beware that decorated functions will be added in order
693 list of steps. Beware that decorated functions will be added in order
694 (this may matter).
694 (this may matter).
695
695
696 You can only use this decorator for new steps, if you want to wrap a step
696 You can only use this decorator for new steps, if you want to wrap a step
697 from an extension, attack the b2partsgenmapping dictionary directly."""
697 from an extension, attack the b2partsgenmapping dictionary directly."""
698 def dec(func):
698 def dec(func):
699 assert stepname not in b2partsgenmapping
699 assert stepname not in b2partsgenmapping
700 b2partsgenmapping[stepname] = func
700 b2partsgenmapping[stepname] = func
701 if idx is None:
701 if idx is None:
702 b2partsgenorder.append(stepname)
702 b2partsgenorder.append(stepname)
703 else:
703 else:
704 b2partsgenorder.insert(idx, stepname)
704 b2partsgenorder.insert(idx, stepname)
705 return func
705 return func
706 return dec
706 return dec
707
707
708 def _pushb2ctxcheckheads(pushop, bundler):
708 def _pushb2ctxcheckheads(pushop, bundler):
709 """Generate race condition checking parts
709 """Generate race condition checking parts
710
710
711 Exists as an independent function to aid extensions
711 Exists as an independent function to aid extensions
712 """
712 """
713 # * 'force' do not check for push race,
713 # * 'force' do not check for push race,
714 # * if we don't push anything, there are nothing to check.
714 # * if we don't push anything, there are nothing to check.
715 if not pushop.force and pushop.outgoing.missingheads:
715 if not pushop.force and pushop.outgoing.missingheads:
716 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
716 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
717 emptyremote = pushop.pushbranchmap is None
717 emptyremote = pushop.pushbranchmap is None
718 if not allowunrelated or emptyremote:
718 if not allowunrelated or emptyremote:
719 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
719 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
720 else:
720 else:
721 affected = set()
721 affected = set()
722 for branch, heads in pushop.pushbranchmap.iteritems():
722 for branch, heads in pushop.pushbranchmap.iteritems():
723 remoteheads, newheads, unsyncedheads, discardedheads = heads
723 remoteheads, newheads, unsyncedheads, discardedheads = heads
724 if remoteheads is not None:
724 if remoteheads is not None:
725 remote = set(remoteheads)
725 remote = set(remoteheads)
726 affected |= set(discardedheads) & remote
726 affected |= set(discardedheads) & remote
727 affected |= remote - set(newheads)
727 affected |= remote - set(newheads)
728 if affected:
728 if affected:
729 data = iter(sorted(affected))
729 data = iter(sorted(affected))
730 bundler.newpart('check:updated-heads', data=data)
730 bundler.newpart('check:updated-heads', data=data)
731
731
732 @b2partsgenerator('changeset')
732 @b2partsgenerator('changeset')
733 def _pushb2ctx(pushop, bundler):
733 def _pushb2ctx(pushop, bundler):
734 """handle changegroup push through bundle2
734 """handle changegroup push through bundle2
735
735
736 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
736 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
737 """
737 """
738 if 'changesets' in pushop.stepsdone:
738 if 'changesets' in pushop.stepsdone:
739 return
739 return
740 pushop.stepsdone.add('changesets')
740 pushop.stepsdone.add('changesets')
741 # Send known heads to the server for race detection.
741 # Send known heads to the server for race detection.
742 if not _pushcheckoutgoing(pushop):
742 if not _pushcheckoutgoing(pushop):
743 return
743 return
744 pushop.repo.prepushoutgoinghooks(pushop)
744 pushop.repo.prepushoutgoinghooks(pushop)
745
745
746 _pushb2ctxcheckheads(pushop, bundler)
746 _pushb2ctxcheckheads(pushop, bundler)
747
747
748 b2caps = bundle2.bundle2caps(pushop.remote)
748 b2caps = bundle2.bundle2caps(pushop.remote)
749 version = '01'
749 version = '01'
750 cgversions = b2caps.get('changegroup')
750 cgversions = b2caps.get('changegroup')
751 if cgversions: # 3.1 and 3.2 ship with an empty value
751 if cgversions: # 3.1 and 3.2 ship with an empty value
752 cgversions = [v for v in cgversions
752 cgversions = [v for v in cgversions
753 if v in changegroup.supportedoutgoingversions(
753 if v in changegroup.supportedoutgoingversions(
754 pushop.repo)]
754 pushop.repo)]
755 if not cgversions:
755 if not cgversions:
756 raise ValueError(_('no common changegroup version'))
756 raise ValueError(_('no common changegroup version'))
757 version = max(cgversions)
757 version = max(cgversions)
758 cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
758 cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
759 'push')
759 'push')
760 cgpart = bundler.newpart('changegroup', data=cgstream)
760 cgpart = bundler.newpart('changegroup', data=cgstream)
761 if cgversions:
761 if cgversions:
762 cgpart.addparam('version', version)
762 cgpart.addparam('version', version)
763 if 'treemanifest' in pushop.repo.requirements:
763 if 'treemanifest' in pushop.repo.requirements:
764 cgpart.addparam('treemanifest', '1')
764 cgpart.addparam('treemanifest', '1')
765 def handlereply(op):
765 def handlereply(op):
766 """extract addchangegroup returns from server reply"""
766 """extract addchangegroup returns from server reply"""
767 cgreplies = op.records.getreplies(cgpart.id)
767 cgreplies = op.records.getreplies(cgpart.id)
768 assert len(cgreplies['changegroup']) == 1
768 assert len(cgreplies['changegroup']) == 1
769 pushop.cgresult = cgreplies['changegroup'][0]['return']
769 pushop.cgresult = cgreplies['changegroup'][0]['return']
770 return handlereply
770 return handlereply
771
771
772 @b2partsgenerator('phase')
772 @b2partsgenerator('phase')
773 def _pushb2phases(pushop, bundler):
773 def _pushb2phases(pushop, bundler):
774 """handle phase push through bundle2"""
774 """handle phase push through bundle2"""
775 if 'phases' in pushop.stepsdone:
775 if 'phases' in pushop.stepsdone:
776 return
776 return
777 b2caps = bundle2.bundle2caps(pushop.remote)
777 b2caps = bundle2.bundle2caps(pushop.remote)
778 if not 'pushkey' in b2caps:
778 if not 'pushkey' in b2caps:
779 return
779 return
780 pushop.stepsdone.add('phases')
780 pushop.stepsdone.add('phases')
781 part2node = []
781 part2node = []
782
782
783 def handlefailure(pushop, exc):
783 def handlefailure(pushop, exc):
784 targetid = int(exc.partid)
784 targetid = int(exc.partid)
785 for partid, node in part2node:
785 for partid, node in part2node:
786 if partid == targetid:
786 if partid == targetid:
787 raise error.Abort(_('updating %s to public failed') % node)
787 raise error.Abort(_('updating %s to public failed') % node)
788
788
789 enc = pushkey.encode
789 enc = pushkey.encode
790 for newremotehead in pushop.outdatedphases:
790 for newremotehead in pushop.outdatedphases:
791 part = bundler.newpart('pushkey')
791 part = bundler.newpart('pushkey')
792 part.addparam('namespace', enc('phases'))
792 part.addparam('namespace', enc('phases'))
793 part.addparam('key', enc(newremotehead.hex()))
793 part.addparam('key', enc(newremotehead.hex()))
794 part.addparam('old', enc('%d' % phases.draft))
794 part.addparam('old', enc('%d' % phases.draft))
795 part.addparam('new', enc('%d' % phases.public))
795 part.addparam('new', enc('%d' % phases.public))
796 part2node.append((part.id, newremotehead))
796 part2node.append((part.id, newremotehead))
797 pushop.pkfailcb[part.id] = handlefailure
797 pushop.pkfailcb[part.id] = handlefailure
798
798
799 def handlereply(op):
799 def handlereply(op):
800 for partid, node in part2node:
800 for partid, node in part2node:
801 partrep = op.records.getreplies(partid)
801 partrep = op.records.getreplies(partid)
802 results = partrep['pushkey']
802 results = partrep['pushkey']
803 assert len(results) <= 1
803 assert len(results) <= 1
804 msg = None
804 msg = None
805 if not results:
805 if not results:
806 msg = _('server ignored update of %s to public!\n') % node
806 msg = _('server ignored update of %s to public!\n') % node
807 elif not int(results[0]['return']):
807 elif not int(results[0]['return']):
808 msg = _('updating %s to public failed!\n') % node
808 msg = _('updating %s to public failed!\n') % node
809 if msg is not None:
809 if msg is not None:
810 pushop.ui.warn(msg)
810 pushop.ui.warn(msg)
811 return handlereply
811 return handlereply
812
812
813 @b2partsgenerator('obsmarkers')
813 @b2partsgenerator('obsmarkers')
814 def _pushb2obsmarkers(pushop, bundler):
814 def _pushb2obsmarkers(pushop, bundler):
815 if 'obsmarkers' in pushop.stepsdone:
815 if 'obsmarkers' in pushop.stepsdone:
816 return
816 return
817 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
817 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
818 if obsolete.commonversion(remoteversions) is None:
818 if obsolete.commonversion(remoteversions) is None:
819 return
819 return
820 pushop.stepsdone.add('obsmarkers')
820 pushop.stepsdone.add('obsmarkers')
821 if pushop.outobsmarkers:
821 if pushop.outobsmarkers:
822 markers = sorted(pushop.outobsmarkers)
822 markers = sorted(pushop.outobsmarkers)
823 bundle2.buildobsmarkerspart(bundler, markers)
823 bundle2.buildobsmarkerspart(bundler, markers)
824
824
825 @b2partsgenerator('bookmarks')
825 @b2partsgenerator('bookmarks')
826 def _pushb2bookmarks(pushop, bundler):
826 def _pushb2bookmarks(pushop, bundler):
827 """handle bookmark push through bundle2"""
827 """handle bookmark push through bundle2"""
828 if 'bookmarks' in pushop.stepsdone:
828 if 'bookmarks' in pushop.stepsdone:
829 return
829 return
830 b2caps = bundle2.bundle2caps(pushop.remote)
830 b2caps = bundle2.bundle2caps(pushop.remote)
831 if 'pushkey' not in b2caps:
831 if 'pushkey' not in b2caps:
832 return
832 return
833 pushop.stepsdone.add('bookmarks')
833 pushop.stepsdone.add('bookmarks')
834 part2book = []
834 part2book = []
835 enc = pushkey.encode
835 enc = pushkey.encode
836
836
837 def handlefailure(pushop, exc):
837 def handlefailure(pushop, exc):
838 targetid = int(exc.partid)
838 targetid = int(exc.partid)
839 for partid, book, action in part2book:
839 for partid, book, action in part2book:
840 if partid == targetid:
840 if partid == targetid:
841 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
841 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
842 # we should not be called for part we did not generated
842 # we should not be called for part we did not generated
843 assert False
843 assert False
844
844
845 for book, old, new in pushop.outbookmarks:
845 for book, old, new in pushop.outbookmarks:
846 part = bundler.newpart('pushkey')
846 part = bundler.newpart('pushkey')
847 part.addparam('namespace', enc('bookmarks'))
847 part.addparam('namespace', enc('bookmarks'))
848 part.addparam('key', enc(book))
848 part.addparam('key', enc(book))
849 part.addparam('old', enc(old))
849 part.addparam('old', enc(old))
850 part.addparam('new', enc(new))
850 part.addparam('new', enc(new))
851 action = 'update'
851 action = 'update'
852 if not old:
852 if not old:
853 action = 'export'
853 action = 'export'
854 elif not new:
854 elif not new:
855 action = 'delete'
855 action = 'delete'
856 part2book.append((part.id, book, action))
856 part2book.append((part.id, book, action))
857 pushop.pkfailcb[part.id] = handlefailure
857 pushop.pkfailcb[part.id] = handlefailure
858
858
859 def handlereply(op):
859 def handlereply(op):
860 ui = pushop.ui
860 ui = pushop.ui
861 for partid, book, action in part2book:
861 for partid, book, action in part2book:
862 partrep = op.records.getreplies(partid)
862 partrep = op.records.getreplies(partid)
863 results = partrep['pushkey']
863 results = partrep['pushkey']
864 assert len(results) <= 1
864 assert len(results) <= 1
865 if not results:
865 if not results:
866 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
866 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
867 else:
867 else:
868 ret = int(results[0]['return'])
868 ret = int(results[0]['return'])
869 if ret:
869 if ret:
870 ui.status(bookmsgmap[action][0] % book)
870 ui.status(bookmsgmap[action][0] % book)
871 else:
871 else:
872 ui.warn(bookmsgmap[action][1] % book)
872 ui.warn(bookmsgmap[action][1] % book)
873 if pushop.bkresult is not None:
873 if pushop.bkresult is not None:
874 pushop.bkresult = 1
874 pushop.bkresult = 1
875 return handlereply
875 return handlereply
876
876
877 @b2partsgenerator('pushvars', idx=0)
877 @b2partsgenerator('pushvars', idx=0)
878 def _getbundlesendvars(pushop, bundler):
878 def _getbundlesendvars(pushop, bundler):
879 '''send shellvars via bundle2'''
879 '''send shellvars via bundle2'''
880 pushvars = pushop.pushvars
880 pushvars = pushop.pushvars
881 if pushvars:
881 if pushvars:
882 shellvars = {}
882 shellvars = {}
883 for raw in pushvars:
883 for raw in pushvars:
884 if '=' not in raw:
884 if '=' not in raw:
885 msg = ("unable to parse variable '%s', should follow "
885 msg = ("unable to parse variable '%s', should follow "
886 "'KEY=VALUE' or 'KEY=' format")
886 "'KEY=VALUE' or 'KEY=' format")
887 raise error.Abort(msg % raw)
887 raise error.Abort(msg % raw)
888 k, v = raw.split('=', 1)
888 k, v = raw.split('=', 1)
889 shellvars[k] = v
889 shellvars[k] = v
890
890
891 part = bundler.newpart('pushvars')
891 part = bundler.newpart('pushvars')
892
892
893 for key, value in shellvars.iteritems():
893 for key, value in shellvars.iteritems():
894 part.addparam(key, value, mandatory=False)
894 part.addparam(key, value, mandatory=False)
895
895
896 def _pushbundle2(pushop):
896 def _pushbundle2(pushop):
897 """push data to the remote using bundle2
897 """push data to the remote using bundle2
898
898
899 The only currently supported type of data is changegroup but this will
899 The only currently supported type of data is changegroup but this will
900 evolve in the future."""
900 evolve in the future."""
901 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
901 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
902 pushback = (pushop.trmanager
902 pushback = (pushop.trmanager
903 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
903 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
904
904
905 # create reply capability
905 # create reply capability
906 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
906 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
907 allowpushback=pushback))
907 allowpushback=pushback))
908 bundler.newpart('replycaps', data=capsblob)
908 bundler.newpart('replycaps', data=capsblob)
909 replyhandlers = []
909 replyhandlers = []
910 for partgenname in b2partsgenorder:
910 for partgenname in b2partsgenorder:
911 partgen = b2partsgenmapping[partgenname]
911 partgen = b2partsgenmapping[partgenname]
912 ret = partgen(pushop, bundler)
912 ret = partgen(pushop, bundler)
913 if callable(ret):
913 if callable(ret):
914 replyhandlers.append(ret)
914 replyhandlers.append(ret)
915 # do not push if nothing to push
915 # do not push if nothing to push
916 if bundler.nbparts <= 1:
916 if bundler.nbparts <= 1:
917 return
917 return
918 stream = util.chunkbuffer(bundler.getchunks())
918 stream = util.chunkbuffer(bundler.getchunks())
919 try:
919 try:
920 try:
920 try:
921 reply = pushop.remote.unbundle(
921 reply = pushop.remote.unbundle(
922 stream, ['force'], pushop.remote.url())
922 stream, ['force'], pushop.remote.url())
923 except error.BundleValueError as exc:
923 except error.BundleValueError as exc:
924 raise error.Abort(_('missing support for %s') % exc)
924 raise error.Abort(_('missing support for %s') % exc)
925 try:
925 try:
926 trgetter = None
926 trgetter = None
927 if pushback:
927 if pushback:
928 trgetter = pushop.trmanager.transaction
928 trgetter = pushop.trmanager.transaction
929 op = bundle2.processbundle(pushop.repo, reply, trgetter)
929 op = bundle2.processbundle(pushop.repo, reply, trgetter)
930 except error.BundleValueError as exc:
930 except error.BundleValueError as exc:
931 raise error.Abort(_('missing support for %s') % exc)
931 raise error.Abort(_('missing support for %s') % exc)
932 except bundle2.AbortFromPart as exc:
932 except bundle2.AbortFromPart as exc:
933 pushop.ui.status(_('remote: %s\n') % exc)
933 pushop.ui.status(_('remote: %s\n') % exc)
934 if exc.hint is not None:
934 if exc.hint is not None:
935 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
935 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
936 raise error.Abort(_('push failed on remote'))
936 raise error.Abort(_('push failed on remote'))
937 except error.PushkeyFailed as exc:
937 except error.PushkeyFailed as exc:
938 partid = int(exc.partid)
938 partid = int(exc.partid)
939 if partid not in pushop.pkfailcb:
939 if partid not in pushop.pkfailcb:
940 raise
940 raise
941 pushop.pkfailcb[partid](pushop, exc)
941 pushop.pkfailcb[partid](pushop, exc)
942 for rephand in replyhandlers:
942 for rephand in replyhandlers:
943 rephand(op)
943 rephand(op)
944
944
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None

    # A full push where nothing is excluded and nothing is filtered can
    # use the fast path: no push race is possible in that situation.
    usefastpath = (pushop.revs is None
                   and not outgoing.excluded
                   and not pushop.repo.changelog.filteredrevs)
    if usefastpath:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # Apply the changegroup to the remote. The local repo finds heads on
    # the server and works out what revs it must push. Once the revs are
    # transferred, if the server finds it has different heads (someone
    # else won a commit/push race), the server aborts.
    remoteheads = ['force'] if pushop.force else pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
984
984
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    First pulls the remote's phase data and applies it locally, then pushes
    any locally-outdated phases back through the legacy pushkey protocol
    (unless bundle2 already handled them).
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        # Server has no phase support (or everything is public): treat all
        # common heads as public locally.
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        # pheads: heads the remote considers public
        # droots: roots of the remote's draft set
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # publishing server: everything common becomes public locally
            _localphasemove(pushop, cheads)
        else: # publish = False
            # non-publishing server: mirror the remote's public/draft split
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            # nothing was pushed; use the conservative fallback computation
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                # best-effort: report the failure but keep going
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1040
1040
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase, nodes)
        return
    # The repo is not locked, so we must not change any phases!
    # Inform the user that phases would have been moved when applicable.
    skippedmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
    phasestr = phases.phasenames[phase]
    if skippedmoves:
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1057
1057
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    # reverse sort to ensure we end with dump0
    results = [remote.pushkey('obsolete', key, '', remotedata[key])
               for key in sorted(remotedata, reverse=True)]
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
1076
1076
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # classify the operation: no old value means a new bookmark is
        # exported, no new value means deletion, otherwise a move
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        okmsg, failmsg = bookmsgmap[action]
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(okmsg % b)
        else:
            ui.warn(failmsg % b)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
1098
1098
class pulloperation(object):
    """State holder for a single pull operation.

    Carries pull-related state and very common operations. A new instance
    should be created at the beginning of each pull and discarded afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # destination repo of the pull
        self.repo = repo
        # source peer we pull from
        self.remote = remote
        # revisions we try to pull (None means "everything")
        self.heads = heads
        # bookmarks explicitly requested on the command line
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # was --force given?
        self.force = force
        # was a streaming clone requested?
        self.streamclonerequested = streamclonerequested
        # lazily-created transaction manager
        self.trmanager = None
        # changesets common to local and remote before the pull
        self.common = None
        # heads pulled from the remote
        self.rheads = None
        # missing changesets to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of the changegroup pull (used as return code by pull)
        self.cgresult = None
        # names of steps already performed
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        if self.heads is not None:
            # a specific subset was pulled; sync on that subset only
            return self.heads
        # everything possible was pulled: sync on everything common plus
        # any remote head not already in the common set
        known = set(self.common)
        return list(self.common) + [n for n in self.rheads if n not in known]

    @util.propertycache
    def canusebundle2(self):
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1169
1169
class transactionmanager(util.transactional):
    """Manage the life cycle of a transaction.

    The transaction is created on demand and the appropriate hooks are
    called when it is closed; ``close``/``release`` are no-ops until a
    transaction exists."""
    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def release(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
1199
1199
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    # Convert opargs keys to native str before **-ing: on Python 3 keyword
    # argument names must be str, not bytes (same treatment as the kwargs
    # in _pullbundle2).
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           **pycompat.strkwargs(opargs))

    peerlocal = pullop.remote.local()
    if peerlocal:
        # refuse to pull from a repo using features we do not support
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop
1257
1257
# ordered list of discovery step names to perform before pull
pulldiscoveryorder = []

# mapping from step name to implementing function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    """decorator registering a discovery function to run before pull

    The decorated function is added to the step -> function mapping and its
    name appended to the list of steps. Beware that decorated functions are
    registered in definition order (this may matter).

    You can only use this decorator for a new step; to wrap a step from an
    extension, change the pulldiscovery dictionary directly."""
    def register(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1281
1281
def _pulldiscovery(pullop):
    """Run every registered discovery step, in registration order."""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1287
1287
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # already fetched
        return
    usebundle2 = pullop.canusebundle2
    if usebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice
        # with new implementation: bundle2 will fetch bookmarks itself.
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1301
1301
1302
1302
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will change to handle all
    discovery at some point.

    Stores ``common``, ``fetch`` and ``rheads`` on the pull operation.
    """
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    # use the unfiltered nodemap so locally-hidden nodes are still "known"
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, drop it from the unknown
        # remote heads and put it back in common.
        #
        # This is a hackish solution to catch most of the "common but locally
        # hidden" situation. We do not perform discovery on the unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but is not including a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                # known locally: treat as common if not already recorded
                if n not in scommon:
                    common.append(n)
            else:
                # genuinely unknown: keep it as a head to fetch
                filteredrheads.append(n)
        if not filteredrheads:
            # every remote head was locally known — nothing left to fetch
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1340
1340
1341 def _pullbundle2(pullop):
1341 def _pullbundle2(pullop):
1342 """pull data using bundle2
1342 """pull data using bundle2
1343
1343
1344 For now, the only supported data are changegroup."""
1344 For now, the only supported data are changegroup."""
1345 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1345 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1346
1346
1347 # At the moment we don't do stream clones over bundle2. If that is
1347 # At the moment we don't do stream clones over bundle2. If that is
1348 # implemented then here's where the check for that will go.
1348 # implemented then here's where the check for that will go.
1349 streaming = False
1349 streaming = False
1350
1350
1351 # pulling changegroup
1351 # pulling changegroup
1352 pullop.stepsdone.add('changegroup')
1352 pullop.stepsdone.add('changegroup')
1353
1353
1354 kwargs['common'] = pullop.common
1354 kwargs['common'] = pullop.common
1355 kwargs['heads'] = pullop.heads or pullop.rheads
1355 kwargs['heads'] = pullop.heads or pullop.rheads
1356 kwargs['cg'] = pullop.fetch
1356 kwargs['cg'] = pullop.fetch
1357 if 'listkeys' in pullop.remotebundle2caps:
1357 if 'listkeys' in pullop.remotebundle2caps:
1358 kwargs['listkeys'] = ['phases']
1358 kwargs['listkeys'] = ['phases']
1359 if pullop.remotebookmarks is None:
1359 if pullop.remotebookmarks is None:
1360 # make sure to always includes bookmark data when migrating
1360 # make sure to always includes bookmark data when migrating
1361 # `hg incoming --bundle` to using this function.
1361 # `hg incoming --bundle` to using this function.
1362 kwargs['listkeys'].append('bookmarks')
1362 kwargs['listkeys'].append('bookmarks')
1363
1363
1364 # If this is a full pull / clone and the server supports the clone bundles
1364 # If this is a full pull / clone and the server supports the clone bundles
1365 # feature, tell the server whether we attempted a clone bundle. The
1365 # feature, tell the server whether we attempted a clone bundle. The
1366 # presence of this flag indicates the client supports clone bundles. This
1366 # presence of this flag indicates the client supports clone bundles. This
1367 # will enable the server to treat clients that support clone bundles
1367 # will enable the server to treat clients that support clone bundles
1368 # differently from those that don't.
1368 # differently from those that don't.
1369 if (pullop.remote.capable('clonebundles')
1369 if (pullop.remote.capable('clonebundles')
1370 and pullop.heads is None and list(pullop.common) == [nullid]):
1370 and pullop.heads is None and list(pullop.common) == [nullid]):
1371 kwargs['cbattempted'] = pullop.clonebundleattempted
1371 kwargs['cbattempted'] = pullop.clonebundleattempted
1372
1372
1373 if streaming:
1373 if streaming:
1374 pullop.repo.ui.status(_('streaming all changes\n'))
1374 pullop.repo.ui.status(_('streaming all changes\n'))
1375 elif not pullop.fetch:
1375 elif not pullop.fetch:
1376 pullop.repo.ui.status(_("no changes found\n"))
1376 pullop.repo.ui.status(_("no changes found\n"))
1377 pullop.cgresult = 0
1377 pullop.cgresult = 0
1378 else:
1378 else:
1379 if pullop.heads is None and list(pullop.common) == [nullid]:
1379 if pullop.heads is None and list(pullop.common) == [nullid]:
1380 pullop.repo.ui.status(_("requesting all changes\n"))
1380 pullop.repo.ui.status(_("requesting all changes\n"))
1381 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1381 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1382 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1382 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1383 if obsolete.commonversion(remoteversions) is not None:
1383 if obsolete.commonversion(remoteversions) is not None:
1384 kwargs['obsmarkers'] = True
1384 kwargs['obsmarkers'] = True
1385 pullop.stepsdone.add('obsmarkers')
1385 pullop.stepsdone.add('obsmarkers')
1386 _pullbundle2extraprepare(pullop, kwargs)
1386 _pullbundle2extraprepare(pullop, kwargs)
1387 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1387 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1388 try:
1388 try:
1389 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1389 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1390 except bundle2.AbortFromPart as exc:
1390 except bundle2.AbortFromPart as exc:
1391 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1391 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1392 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1392 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1393 except error.BundleValueError as exc:
1393 except error.BundleValueError as exc:
1394 raise error.Abort(_('missing support for %s') % exc)
1394 raise error.Abort(_('missing support for %s') % exc)
1395
1395
1396 if pullop.fetch:
1396 if pullop.fetch:
1397 pullop.cgresult = bundle2.combinechangegroupresults(op)
1397 pullop.cgresult = bundle2.combinechangegroupresults(op)
1398
1398
1399 # If the bundle had a phase-heads part, then phase exchange is already done
1399 # If the bundle had a phase-heads part, then phase exchange is already done
1400 if op.records['phase-heads']:
1400 if op.records['phase-heads']:
1401 pullop.stepsdone.add('phases')
1401 pullop.stepsdone.add('phases')
1402
1402
1403 # processing phases change
1403 # processing phases change
1404 for namespace, value in op.records['listkeys']:
1404 for namespace, value in op.records['listkeys']:
1405 if namespace == 'phases':
1405 if namespace == 'phases':
1406 _pullapplyphases(pullop, value)
1406 _pullapplyphases(pullop, value)
1407
1407
1408 # processing bookmark update
1408 # processing bookmark update
1409 for namespace, value in op.records['listkeys']:
1409 for namespace, value in op.records['listkeys']:
1410 if namespace == 'bookmarks':
1410 if namespace == 'bookmarks':
1411 pullop.remotebookmarks = value
1411 pullop.remotebookmarks = value
1412
1412
1413 # bookmark data were either already there or pulled in the bundle
1413 # bookmark data were either already there or pulled in the bundle
1414 if pullop.remotebookmarks is not None:
1414 if pullop.remotebookmarks is not None:
1415 _pullbookmarks(pullop)
1415 _pullbookmarks(pullop)
1416
1416
1417 def _pullbundle2extraprepare(pullop, kwargs):
1417 def _pullbundle2extraprepare(pullop, kwargs):
1418 """hook function so that extensions can extend the getbundle call"""
1418 """hook function so that extensions can extend the getbundle call"""
1419 pass
1419 pass
1420
1420
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Negotiates the most capable transfer method the remote offers
    (getbundle > changegroupsubset > changegroup), applies the resulting
    changegroup in a transaction, and stores the outcome in
    ``pullop.cgresult``. Runs at most once per pull operation.
    """
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        # Nothing to transfer: report and record a no-op result.
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        # Full pull / clone: everything is requested.
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        # Legacy remote without subset support: fetch a full changegroup.
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1455
1455
def _pullphase(pullop):
    """Fetch phase information from the remote and apply it locally.

    No-op when the 'phases' step has already been performed for this
    pull operation.
    """
    if 'phases' not in pullop.stepsdone:
        remotephases = pullop.remote.listkeys('phases')
        _pullapplyphases(pullop, remotephases)
1462
1462
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state

    ``remotephases`` is the raw listkeys 'phases' mapping from the
    remote. Advances local phase boundaries (never demotes): changesets
    the remote reports as public become public, the rest of the pulled
    subset may become draft. Runs at most once per pull operation.
    """
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    # Work on the unfiltered repo so hidden changesets are phased too.
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1497
1497
def _pullbookmarks(pullop):
    """Update local bookmarks from the remote bookmark data on ``pullop``.

    Runs at most once per pull operation; subsequent calls are no-ops.
    """
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    # Remote bookmarks arrive hex-encoded; convert to binary nodes first.
    binbookmarks = bookmod.unhexlifybookmarks(pullop.remotebookmarks)
    bookmod.updatefromremote(repo.ui, repo, binbookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1510
1510
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        # 'dump0' absent means the remote advertises no markers at all,
        # so we avoid opening a transaction for nothing.
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            # NOTE(review): keys are visited in reverse-sorted order —
            # presumably to control marker ordering; confirm if changing.
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    # Marker payloads are base85-encoded binary blobs.
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            # Obsolescence data changed: drop cached volatile revsets.
            pullop.repo.invalidatevolatilesets()
    return tr
1538
1538
def caps20to10(repo):
    """Return the capability set requesting bundle20 during getbundle.

    The set always contains 'HG20' plus a URL-quoted encoding of the
    local repo's bundle2 capabilities.
    """
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1545
1545
# Ordered list of step names used when building a bundle2 for getbundle.
# The order is significant: parts are generated in this sequence.
getbundle2partsorder = []

# step name -> part-generating function
#
# Kept public so extensions can wrap individual steps if necessary.
getbundle2partsmapping = {}

def getbundle2partsgenerator(stepname, idx=None):
    """Decorator registering a bundle2 part generator for getbundle.

    The decorated function is stored in ``getbundle2partsmapping`` and
    ``stepname`` is added to ``getbundle2partsorder`` — appended when
    ``idx`` is None, otherwise inserted at position ``idx``. Decoration
    order therefore matters.

    Only use this decorator for new steps; to wrap an existing step from
    an extension, modify the ``getbundle2partsmapping`` dictionary
    directly.
    """
    def register(func):
        # Guard against two generators claiming the same step name.
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return register
1572
1572
def bundle2requested(bundlecaps):
    """Report whether the client capabilities request a bundle2 stream.

    True when any advertised capability starts with 'HG2' (e.g. 'HG20');
    False for a missing or bundle2-free capability set.
    """
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1577
1577
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    # **kwargs arrive with native-str keys; normalize to bytes for
    # internal processing (they are converted back at call sites below).
    kwargs = pycompat.byteskwargs(kwargs)
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        # bundle10 can only carry a changegroup; reject contradictory
        # or unknown getbundle arguments outright.
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        return changegroup.makestream(repo, outgoing, '01', source,
                                      bundlecaps=bundlecaps)

    # bundle20 case
    # Decode the client's advertised bundle2 capabilities from the
    # URL-quoted 'bundle2=' entries.
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # Run every registered part generator in order; each may append
    # parts to the bundler (see getbundle2partsgenerator).
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    return bundler.getchunks()
1618
1618
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cgstream = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            # Negotiate the highest changegroup version both sides know.
            cgversions = [v for v in cgversions
                          if v in changegroup.supportedoutgoingversions(repo)]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        if outgoing.missing:
            cgstream = changegroup.makestream(repo, outgoing, version, source,
                                              bundlecaps=bundlecaps)

    # cgstream stays None when the client opted out of changegroups or
    # there is nothing missing — in either case no part is emitted.
    if cgstream:
        part = bundler.newpart('changegroup', data=cgstream)
        if cgversions:
            part.addparam('version', version)
        # Advisory change count hint; clients may use it for progress.
        part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
        if 'treemanifest' in repo.requirements:
            part.addparam('treemanifest', '1')
1646
1646
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """Emit one 'listkeys' part per requested pushkey namespace."""
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1657
1657
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """Attach an obsolescence-markers part when the client asked for one."""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # Only markers relevant to ancestors of the requested heads are sent.
    ancestors = [ctx.node() for ctx in repo.set('::%ln', heads)]
    relevant = sorted(repo.obsstore.relevantmarkers(ancestors))
    bundle2.buildobsmarkerspart(bundler, relevant)
1669
1669
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Add the .hgtags filenodes mapping part to the bundle.

    Only entries for heads carried by this bundle are sent. The part
    payload is a sequence of raw pairs: a 20 byte changeset node
    followed by its .hgtags filenode value.
    """
    # Skip when no changesets are being exchanged, or when the client
    # did not advertise support for this part.
    sendingchangesets = kwargs.get('cg', True)
    if not sendingchangesets or 'hgtagsfnodes' not in b2caps:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1689
1689
def _getbookmarks(repo, **kwargs):
    """Return a {bookmark name: binary node} mapping for ``repo``.

    Primarily used to generate the `bookmarks` bundle2 part. Kept as a
    standalone function so extensions can easily wrap it; ``kwargs``
    leaves room for extensions to introduce extra parameters.
    """
    return {name: node for name, node in bookmod.listbinbookmarks(repo)}
1700
1700
def check_heads(repo, their_heads, context):
    """Raise PushRaced if the repo heads no longer match ``their_heads``.

    Used by peer for unbundling. ``their_heads`` may be ['force'] (skip
    the check entirely), the exact current head list, or
    ['hashed', <sha1 of the sorted heads>].
    """
    heads = repo.heads()
    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
    if (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
1714
1714
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application
    and has a mechanism to check that no push race occurred between the
    creation of the bundle and its application.

    If the push was raced, a PushRaced exception is raised.
    """
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call will be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                # Lazily take wlock/lock and open the transaction only
                # when a bundle2 part actually needs one; state is kept
                # in the shared lockandtr list for the finally clause.
                def gettransaction():
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    # Capture any further ui output into an 'output'
                    # part of the reply bundle (flushed in the outer
                    # finally via recordout).
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # Tag the exception so callers know it happened during
                # bundle2 processing, and salvage captured output so it
                # can still reach the client.
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        # Release in reverse acquisition order: tr, lock, wlock.
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
1787
1787
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""

    repo = pullop.repo
    remote = pullop.remote
    ui = repo.ui

    # The feature must be enabled, the local repo must be empty, no
    # specific heads may have been requested, and the server must
    # advertise the capability.
    if not ui.configbool('ui', 'clonebundles'):
        return
    if len(repo) or pullop.heads:
        return
    if not remote.capable('clonebundles'):
        return

    manifest = remote._call('clonebundles')

    # Issuing the wire protocol command is good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    candidates = parseclonebundlesmanifest(repo, manifest)
    if not candidates:
        ui.note(_('no clone bundles available on remote; '
                  'falling back to regular clone\n'))
        return

    candidates = filterclonebundleentries(repo, candidates)
    if not candidates:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        ui.warn(_('no compatible clone bundles available on server; '
                  'falling back to regular clone\n'))
        ui.warn(_('(you may want to report this to the server '
                  'operator)\n'))
        return

    candidates = sortclonebundleentries(ui, candidates)

    url = candidates[0]['URL']
    ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(ui, repo, url):
        ui.status(_('finished applying clone bundle\n'))
    elif ui.configbool('ui', 'clonebundlefallback'):
        # Bundle failed but operator allowed falling through to a
        # regular clone.
        ui.warn(_('falling back to normal clone\n'))
    else:
        # Bundle failed. We abort by default to avoid the thundering
        # herd of clients flooding a server that was expecting expensive
        # clone load to be offloaded.
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
1851
1851
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue

        # First field is the URL; remaining fields are
        # percent-encoded ``key=value`` attribute pairs.
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(
                        repo, value, externalnames=True)
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version
                except (error.InvalidBundleSpecification,
                        error.UnsupportedBundleSpecification):
                    # Unparseable specs are recorded verbatim but not
                    # expanded into components.
                    pass

        entries.append(attrs)

    return entries
1887
1887
def filterclonebundleentries(repo, entries):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    usable = []
    for entry in entries:
        # Reject entries whose bundle spec this client cannot parse or
        # does not support.
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                parsebundlespec(repo, spec, strict=True)
            except error.InvalidBundleSpecification as e:
                repo.ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (entry['URL'], str(e)))
                continue

        # Reject entries requiring TLS SNI when our Python lacks it.
        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        usable.append(entry)

    return usable
1920
1920
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        # value: attribute dict for one manifest entry.
        # prefers: ordered (key, preferred-value) pairs from config.
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk the preferences in order; the first attribute that
        # discriminates between the two entries decides the ordering.
        for prefkey, prefvalue in self.prefers:
            avalue = self.value.get(prefkey)
            bvalue = other.value.get(prefkey)

            # Identical values (including both missing) can't
            # discriminate; fall through to the next preference.
            if avalue == bvalue:
                continue

            # Special case: one side missing while the other matches the
            # preference exactly — the exact match wins.
            if bvalue is None and avalue == prefvalue:
                return -1
            if avalue is None and bvalue == prefvalue:
                return 1

            # We can't compare unless the attribute is present on both.
            if avalue is None or bvalue is None:
                continue

            # Both present and different: exact matches come first.
            if avalue == prefvalue:
                return -1
            if bvalue == prefvalue:
                return 1

            # Neither matched the preference; try the next attribute.

        # No preference discriminated. Fall back to index order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0
1984
1984
def sortclonebundleentries(ui, entries):
    """Return *entries* ordered by the user's clone bundle preferences.

    With no ``ui.clonebundleprefers`` configured, the manifest order is
    preserved (a shallow copy is returned either way).
    """
    preferences = ui.configlist('ui', 'clonebundleprefers')
    if not preferences:
        return list(entries)

    # Each preference is a "KEY=VALUE" string; split once on '='.
    parsed = [pref.split('=', 1) for pref in preferences]

    ranked = sorted(clonebundleentry(entry, parsed) for entry in entries)
    return [wrapper.value for wrapper in ranked]
1994
1994
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL."""
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            fh = urlmod.open(ui, url)
            payload = readbundle(ui, fh, 'stream')

            # Stream clone bundles apply themselves; everything else
            # goes through the regular bundle2 application machinery.
            if isinstance(payload, streamclone.streamcloneapplier):
                payload.apply(repo)
            else:
                bundle2.applybundle(repo, payload, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') % e.reason)

        # Fetch or apply failed; caller decides whether to fall back.
        return False
General Comments 0
You need to be logged in to leave comments. Login now