streamclone: extract requirements formatting...
Boris Feld
r35830:84965e5f stable
@@ -1,2254 +1,2261 @@
# exchange.py - utility to exchange data between repos.
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import errno
import hashlib

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
)
from . import (
    bookmarks as bookmod,
    bundle2,
    changegroup,
    discovery,
    error,
    lock as lockmod,
    logexchange,
    obsolete,
    phases,
    pushkey,
    pycompat,
    scmutil,
    sslutil,
    streamclone,
    url as urlmod,
    util,
)

urlerr = util.urlerr
urlreq = util.urlreq

# Maps bundle version human names to changegroup versions.
_bundlespeccgversions = {'v1': '01',
                         'v2': '02',
                         'packed1': 's1',
                         'bundle2': '02', #legacy
                        }

# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}

def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params


    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
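
# Illustrative sketch (not part of this changeset): a few example parses,
# assuming the standard compression engines are available.
#
#   parsebundlespec(repo, 'gzip-v2')
#       -> ('GZ', '02', {})             # internal names by default
#   parsebundlespec(repo, 'gzip-v2', externalnames=True)
#       -> ('gzip', 'v2', {})
#   parsebundlespec(repo, 'v2', strict=False)
#       -> ('BZ', '02', {})             # compression defaults to bzip2
#   parsebundlespec(repo, 'v2')
#       -> raises InvalidBundleSpecification (strict mode needs a compression)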

def readbundle(ui, fh, fname, vfs=None):
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
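
# Note (editorial sketch, not part of the changeset): the 4-byte header
# sniffed above dispatches as follows.
#
#   'HG10' + 2-byte compression id -> changegroup.cg1unpacker (bundle1)
#   'HG2x' (currently 'HG20')      -> bundle2.getunbundler
#   'HGS1'                         -> streamclone.streamcloneapplier
#   leading '\0' without 'HG'      -> headerless bundle1, fixed up as HG10/'UN'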

+def _formatrequirementsspec(requirements):
+    return urlreq.quote(','.join(sorted(requirements)))
+
+def _formatrequirementsparams(requirements):
+    requirements = _formatrequirementsspec(requirements)
+    params = "%s%s" % (urlreq.quote("requirements="), requirements)
+    return params
+
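# Illustrative sketch (not part of the changeset): what the two new helpers
# above produce for a typical packed1 requirements set. The escapes follow
# from urlreq.quote's defaults ('=' -> %3D, ',' -> %2C).
#
#   _formatrequirementsspec({'generaldelta', 'revlogv1'})
#       -> 'generaldelta%2Crevlogv1'
#   _formatrequirementsparams({'generaldelta', 'revlogv1'})
#       -> 'requirements%3Dgeneraldelta%2Crevlogv1'
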
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
-        params = 'requirements=%s' % ','.join(sorted(requirements))
-        return 'none-packed1;%s' % urlreq.quote(params)
+        return 'none-packed1;%s' % _formatrequirementsparams(requirements)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
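
# Illustrative sketch (hypothetical file names, not part of the changeset):
# inferring the spec from bundles on disk.
#
#   with open('changesets.hg', 'rb') as fh:  # written by `hg bundle -t gzip-v1`
#       getbundlespec(ui, fh)                # -> 'gzip-v1'
#   with open('stream.hg', 'rb') as fh:      # a packed1 stream clone bundle
#       getbundlespec(ui, fh)
#       # -> 'none-packed1;requirements%3Dgeneraldelta%2Crevlogv1'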

def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if common:
        hasnode = cl.hasnode
        common = [n for n in common if hasnode(n)]
    else:
        common = [nullid]
    if not heads:
        heads = cl.heads()
    return discovery.outgoing(repo, common, heads)

def _forcebundle1(op):
    """return true if a pull/push must use bundle1

    This function is used to allow testing of the older bundle version"""
    ui = op.repo.ui
    forcebundle1 = False
    # The goal of this config is to let developers choose the bundle
    # version used during exchange. This is especially handy during tests.
    # Value is a list of bundle versions to pick from; the highest version
    # should be used.
    #
    # developer config: devel.legacy.exchange
    exchange = ui.configlist('devel', 'legacy.exchange')
    forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
    return forcebundle1 or not op.remote.capable('bundle2')
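
# Note (editorial sketch): the developer config read above looks like this in
# an hgrc; the test suite uses it to exercise the legacy exchange path.
#
#   [devel]
#   legacy.exchange = bundle1
#
# Listing both versions keeps bundle2 selected, since the highest listed
# version wins; bundle1 is also forced whenever the remote lacks the
# 'bundle2' capability.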

class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # steps already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discovery.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed alongside the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no revs targeted for push, all common heads are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

# mapping of messages used when pushing bookmarks
bookmsgmap = {'update': (_("updating bookmark %s\n"),
                         _('updating bookmark %s failed!\n')),
              'export': (_("exporting bookmark %s\n"),
                         _('exporting bookmark %s failed!\n')),
              'delete': (_("deleting remote bookmark %s\n"),
                         _('deleting remote bookmark %s failed!\n')),
              }


def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
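
# Illustrative sketch (hypothetical `repo` and `remote` objects, not part of
# the changeset): how a caller drives a push and reads the result off the
# returned pushoperation.
#
#   pushop = push(repo, remote, revs=[repo['tip'].node()])
#   if pushop.cgresult is None:
#       repo.ui.status('no changesets pushed\n')
#   elif pushop.cgresult == 0:
#       repo.ui.warn('push failed\n')
#   else:
#       repo.ui.status('push succeeded\n')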

# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}

def pushdiscovery(stepname):
    """decorator for functions performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for a new step; if you want to wrap a step
    from an extension, change the pushdiscoverymapping dictionary directly."""
    def dec(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return dec
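
# Illustrative sketch (hypothetical step name, not part of the changeset):
# registering a new discovery step, and the mapping-based wrapping the
# docstring recommends for existing steps.
#
#   @pushdiscovery('mydata')
#   def _pushdiscoverymydata(pushop):
#       pushop.mydata = pushop.repo.ui.config('myext', 'mydata')
#
#   origstep = pushdiscoverymapping['changeset']
#   def wrappedstep(pushop):
#       origstep(pushop)
#       pushop.repo.ui.debug('changeset discovery done\n')
#   pushdiscoverymapping['changeset'] = wrappedstep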

def _pushdiscovery(pushop):
    """Run all discovery steps"""
    for stepname in pushdiscoveryorder:
        step = pushdiscoverymapping[stepname]
        step(pushop)

@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changesets that need to be pushed"""
    fci = discovery.findcommonincoming
    if pushop.revs:
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
                        ancestorsof=pushop.revs)
    else:
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc

@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phases that need to be pushed

    (computed for both the success and failure cases of the changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote supports phases
        # - and no changesets are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(pushop.repo,
                                                     pushop.fallbackheads,
                                                     remotephases)
    droots = pushop.remotephases.draftroots

    extracond = ''
    if not pushop.remotephases.publishing:
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote but public here.
    # XXX Beware that the revset breaks if droots is not strictly
    # XXX roots; we may want to ensure it is, but that is costly.
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changesets we are going to push as draft
        #
        # should not be necessary for a publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                       outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback

@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
        and pushop.repo.obsstore
        and 'obsolete' in pushop.remote.listkeys('namespaces')):
        repo = pushop.repo
        # very naive computation, that can be quite expensive on big repos.
        # However: evolution is currently slow on them anyway.
        nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
        pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)

@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmarks
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmarks
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmarks to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()

def _pushcheckoutgoing(pushop):
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # these messages are kept short for the 80-char line limit
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If we are pushing and there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missing heads will be obsolete or
            # unstable. So checking heads only is ok.
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True

# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator for functions generating bundle2 parts

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, change the b2partsgenmapping dictionary directly."""
    def dec(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return dec
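
# Illustrative sketch (hypothetical part name, not part of the changeset):
# registering a bundle2 parts generator; idx=0 would place the step before
# all previously-registered ones.
#
#   @b2partsgenerator('my-part', idx=0)
#   def _pushb2mypart(pushop, bundler):
#       if 'my-part' in pushop.stepsdone:
#           return
#       pushop.stepsdone.add('my-part')
#       bundler.newpart('x-my-part', data='payload')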

def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' does not check for push races,
    # * if we don't push anything, there is nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)

def _pushing(pushop):
    """return True if we are pushing anything"""
    return bool(pushop.outgoing.missing
                or pushop.outdatedphases
                or pushop.outobsmarkers
                or pushop.outbookmarks)

@b2partsgenerator('check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasbookmarkcheck = 'bookmarks' in b2caps
    if not (pushop.outbookmarks and hasbookmarkcheck):
        return
    data = []
    for book, old, new in pushop.outbookmarks:
        old = bin(old)
        data.append((book, old))
    checkdata = bookmod.binaryencode(data)
    bundler.newpart('check:bookmarks', data=checkdata)

@b2partsgenerator('check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = 'heads' in b2caps.get('phases', ())
    if pushop.remotephases is not None and hasphaseheads:
        # check that the remote phase has not changed
        checks = [[] for p in phases.allphases]
        checks[phases.public].extend(pushop.remotephases.publicheads)
        checks[phases.draft].extend(pushop.remotephases.draftroots)
        if any(checks):
            for nodes in checks:
                nodes.sort()
            checkdata = phases.binaryencode(checks)
            bundler.newpart('check:phases', data=checkdata)

@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
824
831
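# Illustrative note (not part of the original module): the version
# negotiation above intersects the server's advertised changegroup versions
# with what this repo can emit, then picks the highest. For example, with a
# server advertising ('01', '02') and a local repo supporting ('01', '02',
# '03'):
#
#   cgversions = [v for v in ('01', '02') if v in ('01', '02', '03')]
#   version = max(cgversions)   # '02'
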
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
    haspushkey = 'pushkey' in b2caps
    hasphaseheads = 'heads' in b2caps.get('phases', ())

    if hasphaseheads and not legacyphase:
        return _pushb2phaseheads(pushop, bundler)
    elif haspushkey:
        return _pushb2phasespushkey(pushop, bundler)

def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part"""
    pushop.stepsdone.add('phases')
    if pushop.outdatedphases:
        updates = [[] for p in phases.allphases]
        updates[0].extend(h.node() for h in pushop.outdatedphases)
        phasedata = phases.binaryencode(updates)
        bundler.newpart('phase-heads', data=phasedata)

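# Data-shape sketch (illustrative, not from the original source): ``updates``
# above has one bucket per phase value; assuming phases.public == 0, every
# outdated head lands in the public bucket, asking the remote to move those
# heads to public:
#
#   updates = [[node1, node2], [], []]   # public, draft, secret buckets
#   phasedata = phases.binaryencode(updates)
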
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part"""
    pushop.stepsdone.add('phases')
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply

@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        markers = sorted(pushop.outobsmarkers)
        bundle2.buildobsmarkerspart(bundler, markers)

@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)

    legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
    legacybooks = 'bookmarks' in legacy

    if not legacybooks and 'bookmarks' in b2caps:
        return _pushb2bookmarkspart(pushop, bundler)
    elif 'pushkey' in b2caps:
        return _pushb2bookmarkspushkey(pushop, bundler)

def _bmaction(old, new):
    """small utility for bookmark pushing"""
    if not old:
        return 'export'
    elif not new:
        return 'delete'
    return 'update'

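# Behaviour sketch (illustrative): _bmaction maps the old/new bookmark
# values to the message key used by ``bookmsgmap``:
#
#   _bmaction('', 'deadbeef')          # 'export' (new bookmark)
#   _bmaction('deadbeef', '')          # 'delete' (removed bookmark)
#   _bmaction('deadbeef', 'cafebabe')  # 'update' (moved bookmark)
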
def _pushb2bookmarkspart(pushop, bundler):
    pushop.stepsdone.add('bookmarks')
    if not pushop.outbookmarks:
        return

    allactions = []
    data = []
    for book, old, new in pushop.outbookmarks:
        new = bin(new)
        data.append((book, new))
        allactions.append((book, _bmaction(old, new)))
    checkdata = bookmod.binaryencode(data)
    bundler.newpart('bookmarks', data=checkdata)

    def handlereply(op):
        ui = pushop.ui
        # if success
        for book, action in allactions:
            ui.status(bookmsgmap[action][0] % book)

    return handlereply

def _pushb2bookmarkspushkey(pushop, bundler):
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for parts we did not generate
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply

@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    pushvars = pushop.pushvars
    if pushvars:
        shellvars = {}
        for raw in pushvars:
            if '=' not in raw:
                msg = ("unable to parse variable '%s', should follow "
                       "'KEY=VALUE' or 'KEY=' format")
                raise error.Abort(msg % raw)
            k, v = raw.split('=', 1)
            shellvars[k] = v

        part = bundler.newpart('pushvars')

        for key, value in shellvars.iteritems():
            part.addparam(key, value, mandatory=False)

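# Usage sketch (illustrative; the variable name is an assumption): pushvars
# normally arrive from the command line and surface as HG_USERVAR_*
# environment variables in server-side hooks, e.g. (assuming a server
# configured to accept pushvars):
#
#   $ hg push --pushvars "BYPASS_REVIEW=true"
#
# which a pretxnchangegroup hook could then read as
# $HG_USERVAR_BYPASS_REVIEW.
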
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup, but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback,
                                                      role='client'))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)

def _pushchangeset(pushop):
    """Make the actual push of a changeset bundle to the remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = ['force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())

def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and the remote supports phases
        # - and no changeset was pushed
        # - and the remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by courtesy,
        # to publish changesets that are possibly draft locally on the
        # remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public-only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed through bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)

def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
    else:
        # repo is not locked, do not change any phases!
        # Inform the user that phases should have been moved when
        # applicable.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)

def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug('try to push obsolete markers to remote\n')
        rslts = []
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)

def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value from an invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1

class pulloperation(object):
    """An object that represents a single pull operation

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmarks pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of steps already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled everything possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()

class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""
    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()

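# Usage sketch (illustrative only): as a util.transactional, a
# transactionmanager can drive a with-block; the underlying transaction is
# created lazily only if some step asks for it, closed on success and
# released on error:
#
#   trmanager = transactionmanager(repo, 'pull', remote.url())
#   with trmanager:
#       tr = trmanager.transaction()   # opened on first use
#       ...                            # apply incoming data under ``tr``
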
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           **pycompat.strkwargs(opargs))

    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
    with repo.wlock(), repo.lock(), pullop.trmanager:
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        streamclone.maybeperformlegacystreamclone(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)

    # storing remotenames
    if repo.ui.configbool('experimental', 'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pullop

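# API sketch (illustrative; the URL and peer setup are assumptions, not from
# this module): a programmatic pull mirrors what the pull command does:
#
#   from mercurial import hg, exchange
#   other = hg.peer(repo, {}, 'https://example.com/repo')
#   pullop = exchange.pull(repo, other, heads=None)   # pull everything
#   if pullop.cgresult == 0:
#       repo.ui.status('no changesets added\n')
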
# list of steps to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    """decorator for functions performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for a new step; if you want to wrap a step
    from an extension, change the pulldiscoverymapping dictionary directly."""
    def dec(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return dec

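# Extension sketch (hypothetical step name, added for illustration): a new
# discovery step registers itself exactly like the built-in steps below:
#
#   @pulldiscovery('myext:prefetch')
#   def _pulldiscoveryprefetch(pullop):
#       pullop.repo.ui.debug('myext: extra discovery before pull\n')
#
# Wrapping an existing step instead goes through pulldiscoverymapping.
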
def _pulldiscovery(pullop):
    """Run all discovery steps"""
    for stepname in pulldiscoveryorder:
        step = pulldiscoverymapping[stepname]
        step(pullop)

@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but let's be nice
        # with new implementations.
        return
    books = pullop.remote.listkeys('bookmarks')
    pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)


@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; it will change to handle all
    discovery at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, put it back in common.
        #
        # This is a hackish solution to catch most "common but locally
        # hidden" situations. We do not perform discovery on the unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but does not include a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
        if set(rheads).issubset(set(common)):
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads

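# Return-shape note (illustrative): discovery.findcommonincoming yields a
# triple which the step above stores on the pull operation:
#
#   common: nodes known to both sides
#   fetch:  nodes the remote has that we are missing
#   rheads: the remote's current heads
#
# The filtered-heads loop only moves remote heads we already know locally
# (but have hidden) back into ``common`` so they are not re-fetched.
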
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data is the changegroup."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads

    if streaming:
        kwargs['cg'] = False
        kwargs['stream'] = True
        pullop.stepsdone.add('changegroup')
        pullop.stepsdone.add('phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add('changegroup')

        kwargs['cg'] = pullop.fetch

        legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
        hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
        if (not legacyphase and hasbinaryphase):
            kwargs['phases'] = True
            pullop.stepsdone.add('phases')

        if 'listkeys' in pullop.remotebundle2caps:
            if 'phases' not in pullop.stepsdone:
                kwargs['listkeys'] = ['phases']

    bookmarksrequested = False
    legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
    hasbinarybook = 'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        pullop.stepsdone.add('request-bookmarks')

    if ('request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark and hasbinarybook):
        kwargs['bookmarks'] = True
        bookmarksrequested = True

    if 'listkeys' in pullop.remotebundle2caps:
        if 'request-bookmarks' not in pullop.stepsdone:
            # make sure to always include bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add('request-bookmarks')
            kwargs.setdefault('listkeys', []).append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
    try:
        op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction)
        op.modes['bookmarks'] = 'records'
        bundle2.processbundle(pullop.repo, bundle, op=op)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        books = {}
        for record in op.records['bookmarks']:
            books[record['bookmark']] = record["node"]
        pullop.remotebookmarks = books
    else:
        for namespace, value in op.records['listkeys']:
            if namespace == 'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data was either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)

def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call"""

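# Extension sketch (illustrative; the argument name is hypothetical): an
# extension can inject extra getbundle arguments by wrapping this hook:
#
#   from mercurial import exchange, extensions
#
#   def _extraprepare(orig, pullop, kwargs):
#       kwargs['myext_flag'] = True
#       return orig(pullop, kwargs)
#
#   extensions.wrapfunction(exchange, '_pullbundle2extraprepare',
#                           _extraprepare)
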
def _pullchangeset(pullop):
    """pull changesets from the remote into the local repo"""
    # We delay opening the transaction as late as possible so we don't open
    # a transaction for nothing, or we'd break future useful rollback calls
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)

def _pullphase(pullop):
    # Get phases data from the remote
    if 'phases' in pullop.stepsdone:
        return
    remotephases = pullop.remote.listkeys('phases')
    _pullapplyphases(pullop, remotephases)

def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing; all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)

def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    repo = pullop.repo
    remotebookmarks = pullop.remotebookmarks
    bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)

def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` argument is a function that returns the pull
    transaction, creating one if necessary. We return the transaction to
    inform the calling code that a new transaction has been created (when
    applicable).

    Exists mostly to allow overriding for experimentation purposes"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr

def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    caps = {'HG20'}
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    caps.add('bundle2=' + urlreq.quote(capsblob))
    return caps

# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}

def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, patch the getbundle2partsmapping dictionary directly."""
    def dec(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return dec

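# A minimal sketch (not part of the original module) of how a third-party
# extension could use the decorator above to contribute its own part to
# getbundle responses. The step name 'myext-data' and the payload are
# hypothetical; only the registration pattern comes from the code above.
@getbundle2partsgenerator('myext-data')
def _getbundlemyextpart(bundler, repo, source, bundlecaps=None,
                        b2caps=None, **kwargs):
    # Only emit the (hypothetical) part when the client advertised support.
    if 'myext-data' in (b2caps or {}):
        bundler.newpart('myext-data', data=b'payload', mandatory=False)
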
def bundle2requested(bundlecaps):
    if bundlecaps is not None:
        return any(cap.startswith('HG2') for cap in bundlecaps)
    return False

def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        info['bundleversion'] = 1
        return info, changegroup.makestream(repo, outgoing, '01', source,
                                            bundlecaps=bundlecaps)

    # bundle20 case
    info['bundleversion'] = 2
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    info['prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()

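# A hedged sketch of consuming the (info, chunks) pair returned above, e.g.
# to write a bundle to disk. The 'HG20' capability set and the helper name
# are illustrative, not part of the original module.
def _writebundletofile(repo, path):
    info, chunks = getbundlechunks(repo, 'bundle', bundlecaps={'HG20'})
    with open(path, 'wb') as fh:
        for chunk in chunks:
            fh.write(chunk)
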
@getbundle2partsgenerator('stream2')
def _getbundlestream2(bundler, repo, source, bundlecaps=None,
                      b2caps=None, heads=None, common=None, **kwargs):
    if not kwargs.get('stream', False):
        return

    if not streamclone.allowservergeneration(repo):
        raise error.Abort(_('stream data requested but server does not allow '
                            'this feature'),
                          hint=_('well-behaved clients should not be '
                                 'requesting stream data from servers not '
                                 'advertising it; the client may be buggy'))

    # Stream clones don't compress well. And compression undermines a
    # goal of stream clones, which is to be fast. Communicate the desire
    # to avoid compression to consumers of the bundle.
    bundler.prefercompressed = False

    filecount, bytecount, it = streamclone.generatev2(repo)
    requirements = ' '.join(sorted(repo.requirements))
    part = bundler.newpart('stream2', data=it)
    part.addparam('bytecount', '%d' % bytecount, mandatory=True)
    part.addparam('filecount', '%d' % filecount, mandatory=True)
    part.addparam('requirements', requirements, mandatory=True)

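# For illustration only: the inverse of the requirements formatting above,
# as a consumer of the 'requirements' part parameter might implement it
# (the actual client-side handling is elsewhere).
def _parserequirements(reqstring):
    # reqstring was produced by ' '.join(sorted(repo.requirements))
    return set(reqstring.split(' '))
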
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cgstream = None
    if kwargs.get(r'cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            cgversions = [v for v in cgversions
                          if v in changegroup.supportedoutgoingversions(repo)]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        if outgoing.missing:
            cgstream = changegroup.makestream(repo, outgoing, version, source,
                                              bundlecaps=bundlecaps)

    if cgstream:
        part = bundler.newpart('changegroup', data=cgstream)
        if cgversions:
            part.addparam('version', version)
        part.addparam('nbchanges', '%d' % len(outgoing.missing),
                      mandatory=False)
        if 'treemanifest' in repo.requirements:
            part.addparam('treemanifest', '1')

@getbundle2partsgenerator('bookmarks')
def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
                           b2caps=None, **kwargs):
    """add a bookmark part to the requested bundle"""
    if not kwargs.get(r'bookmarks', False):
        return
    if 'bookmarks' not in b2caps:
        raise ValueError(_('no common bookmarks exchange method'))
    books = bookmod.listbinbookmarks(repo)
    data = bookmod.binaryencode(books)
    if data:
        bundler.newpart('bookmarks', data=data)

@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    listkeys = kwargs.get(r'listkeys', ())
    for namespace in listkeys:
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)

@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if kwargs.get(r'obsmarkers', False):
        if heads is None:
            heads = repo.heads()
        subset = [c.node() for c in repo.set('::%ln', heads)]
        markers = repo.obsstore.relevantmarkers(subset)
        markers = sorted(markers)
        bundle2.buildobsmarkerspart(bundler, markers)

@getbundle2partsgenerator('phases')
def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, **kwargs):
    """add phase heads part to the requested bundle"""
    if kwargs.get(r'phases', False):
        if 'heads' not in b2caps.get('phases'):
            raise ValueError(_('no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phases for now
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = 'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        phasemapping = []
        for phase in phases.allphases:
            phasemapping.append(sorted(headsbyphase[phase]))

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart('phase-heads', data=phasedata)

@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20-byte changeset nodes and raw
    .hgtags filenode values.
    """
    # Don't send unless:
    # - changesets are being exchanged,
    # - the client supports it.
    if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)

def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
    if not (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)

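# A small sketch of how a caller could compute the ['hashed', ...] form
# accepted above, mirroring the digest this function derives from the
# server's current heads. The helper name is illustrative.
def hashheads(heads):
    # heads: iterable of 20-byte binary node ids, e.g. repo.heads()
    return hashlib.sha1(''.join(sorted(heads))).digest()
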
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call will be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                def gettransaction():
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r

def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""

    repo = pullop.repo
    remote = pullop.remote

    if not repo.ui.configbool('ui', 'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    res = remote._call('clonebundles')

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(
        repo, entries, streamclonerequested=pullop.streamclonerequested)

    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))

def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    m = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(repo, value,
                                                            externalnames=True)
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version
                except error.InvalidBundleSpecification:
                    pass
                except error.UnsupportedBundleSpecification:
                    pass

        m.append(attrs)

    return m

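# For illustration (URL and attributes made up), a one-entry manifest would
# parse roughly as follows; COMPRESSION and VERSION are derived from
# BUNDLESPEC by the loop above:
#
#   parseclonebundlesmanifest(repo,
#       'https://example.com/full.hg BUNDLESPEC=gzip-v2 REQUIRESNI=true\n')
#   -> [{'URL': 'https://example.com/full.hg', 'BUNDLESPEC': 'gzip-v2',
#        'COMPRESSION': 'gzip', 'VERSION': 'v2', 'REQUIRESNI': 'true'}]
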
def filterclonebundleentries(repo, entries, streamclonerequested=False):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    newentries = []
    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                comp, version, params = parsebundlespec(repo, spec, strict=True)

                # If a stream clone was requested, filter out non-streamclone
                # entries.
                if streamclonerequested and (comp != 'UN' or version != 's1'):
                    repo.ui.debug('filtering %s because not a stream clone\n' %
                                  entry['URL'])
                    continue

            except error.InvalidBundleSpecification as e:
                repo.ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (entry['URL'], str(e)))
                continue
        # If we don't have a spec and requested a stream clone, we don't know
        # what the entry is so don't attempt to apply it.
        elif streamclonerequested:
            repo.ui.debug('filtering %s because cannot determine if a stream '
                          'clone bundle\n' % entry['URL'])
            continue

        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        newentries.append(entry)

    return newentries

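# Hypothetical entries illustrating the stream-clone filtering above: with
# streamclonerequested=True, only the packed1 entry survives, since its
# spec parses to compression 'UN' and version 's1':
#
#   entries = [
#       {'URL': 'https://example.com/full.hg', 'BUNDLESPEC': 'gzip-v2'},
#       {'URL': 'https://example.com/packed.hg',
#        'BUNDLESPEC': 'none-packed1;requirements%3Drevlogv1'},
#   ]
#   filterclonebundleentries(repo, entries, streamclonerequested=True)
#   -> keeps only the 'packed.hg' entry
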
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        for prefkey, prefvalue in self.prefers:
            avalue = self.value.get(prefkey)
            bvalue = other.value.get(prefkey)

            # Special case for b missing attribute and a matches exactly.
            if avalue is not None and bvalue is None and avalue == prefvalue:
                return -1

            # Special case for a missing attribute and b matches exactly.
            if bvalue is not None and avalue is None and bvalue == prefvalue:
                return 1

            # We can't compare unless attribute present on both.
            if avalue is None or bvalue is None:
                continue

            # Same values should fall back to next attribute.
            if avalue == bvalue:
                continue

            # Exact matches come first.
            if avalue == prefvalue:
                return -1
            if bvalue == prefvalue:
                return 1

            # Fall back to next attribute.
            continue

        # If we got here we couldn't sort by attributes and prefers. Fall
        # back to index order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0

def sortclonebundleentries(ui, entries):
    prefers = ui.configlist('ui', 'clonebundleprefers')
    if not prefers:
        return list(entries)

    prefers = [p.split('=', 1) for p in prefers]

    items = sorted(clonebundleentry(v, prefers) for v in entries)
    return [i.value for i in items]

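# A hedged illustration of the preference machinery above: with
#
#   [ui]
#   clonebundleprefers = COMPRESSION=zstd, VERSION=v2
#
# in the client configuration, prefers becomes
# [['COMPRESSION', 'zstd'], ['VERSION', 'v2']], so an entry carrying
# COMPRESSION=zstd sorts ahead of a gzip one; ties fall through to VERSION
# and finally back to manifest order.
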
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL."""
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            fh = urlmod.open(ui, url)
            cg = readbundle(ui, fh, 'stream')

            if isinstance(cg, streamclone.streamcloneapplier):
                cg.apply(repo)
            else:
                bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') % e.reason)

    return False