##// END OF EJS Templates
exchange: remove dead assignment of forcebundle1...
Martin von Zweigbergk -
r36593:df7b7d50 default
parent child Browse files
Show More
@@ -1,2264 +1,2263 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 bin,
16 bin,
17 hex,
17 hex,
18 nullid,
18 nullid,
19 )
19 )
20 from . import (
20 from . import (
21 bookmarks as bookmod,
21 bookmarks as bookmod,
22 bundle2,
22 bundle2,
23 changegroup,
23 changegroup,
24 discovery,
24 discovery,
25 error,
25 error,
26 lock as lockmod,
26 lock as lockmod,
27 logexchange,
27 logexchange,
28 obsolete,
28 obsolete,
29 phases,
29 phases,
30 pushkey,
30 pushkey,
31 pycompat,
31 pycompat,
32 scmutil,
32 scmutil,
33 sslutil,
33 sslutil,
34 streamclone,
34 streamclone,
35 url as urlmod,
35 url as urlmod,
36 util,
36 util,
37 )
37 )
38
38
39 urlerr = util.urlerr
39 urlerr = util.urlerr
40 urlreq = util.urlreq
40 urlreq = util.urlreq
41
41
42 # Maps bundle version human names to changegroup versions.
42 # Maps bundle version human names to changegroup versions.
43 _bundlespeccgversions = {'v1': '01',
43 _bundlespeccgversions = {'v1': '01',
44 'v2': '02',
44 'v2': '02',
45 'packed1': 's1',
45 'packed1': 's1',
46 'bundle2': '02', #legacy
46 'bundle2': '02', #legacy
47 }
47 }
48
48
49 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
49 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
50 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
50 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
51
51
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def splitparams(text):
        # Everything after the first ";" is URI-encoded, ";"-delimited
        # key=value pairs; anything before it is the version string.
        if ';' not in text:
            return text, {}

        version, paramstr = text.split(';', 1)
        params = {}
        for pair in paramstr.split(';'):
            if '=' not in pair:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % pair)
            key, value = pair.split('=', 1)
            params[urlreq.unquote(key)] = urlreq.unquote(value)

        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = splitparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # The value may be only a compression or only a version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = splitparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            # v1 is the baseline, but generaldelta repos and modern
            # compression engines both require v2.
            if ('generaldelta' in repo.requirements
                    or compression not in _bundlespecv1compengines):
                version = 'v2'
            else:
                version = 'v1'
        elif spec in _bundlespeccgversions:
            compression = 'none' if spec == 'packed1' else 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Translate the human-centric names to internal identifiers.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
173
173
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler object for the bundle read from ``fh``.

    ``fname`` is used for error reporting; when it is empty, a headerless
    stream is also accepted and treated as uncompressed HG10 data. When
    ``vfs`` is given and ``fname`` is set, the name is joined to the vfs
    root for reporting purposes.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            # Headerless changegroup: re-inject the bytes already consumed
            # and treat the stream as an uncompressed HG10 bundle.
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = header[0:2]
    version = header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
201
201
def _formatrequirementsspec(requirements):
    """URL-quote a sorted, comma-joined requirements iterable."""
    joined = ','.join(sorted(requirements))
    return urlreq.quote(joined)
204
204
def _formatrequirementsparams(requirements):
    """Render ``requirements`` as a 'requirements=...' bundlespec parameter."""
    quoted = _formatrequirementsspec(requirements)
    return "%s%s" % (urlreq.quote("requirements="), quoted)
209
209
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.

    Raises ``error.Abort`` when the compression algorithm or changegroup
    version cannot be mapped to a known bundlespec, or when the bundle
    type itself is unrecognized.
    """
    def speccompression(alg):
        # Map an internal compression type to its bundlespec name, or None
        # if the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                # Report the unrecognized algorithm name, not the (None)
                # lookup result, so the error message is actionable.
                raise error.Abort(_('unknown compression algorithm: %s')
                                  % b.params['Compression'])
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        return 'none-packed1;%s' % _formatrequirementsparams(requirements)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
261
261
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    changelog = repo.changelog
    if common:
        # Ignore any alleged common nodes the local changelog lacks.
        common = [n for n in common if changelog.hasnode(n)]
    else:
        common = [nullid]
    if not heads:
        heads = changelog.heads()
    return discovery.outgoing(repo, common, heads)
280
280
281 def _forcebundle1(op):
281 def _forcebundle1(op):
282 """return true if a pull/push must use bundle1
282 """return true if a pull/push must use bundle1
283
283
284 This function is used to allow testing of the older bundle version"""
284 This function is used to allow testing of the older bundle version"""
285 ui = op.repo.ui
285 ui = op.repo.ui
286 forcebundle1 = False
287 # The goal is this config is to allow developer to choose the bundle
286 # The goal is this config is to allow developer to choose the bundle
288 # version used during exchanged. This is especially handy during test.
287 # version used during exchanged. This is especially handy during test.
289 # Value is a list of bundle version to be picked from, highest version
288 # Value is a list of bundle version to be picked from, highest version
290 # should be used.
289 # should be used.
291 #
290 #
292 # developer config: devel.legacy.exchange
291 # developer config: devel.legacy.exchange
293 exchange = ui.configlist('devel', 'legacy.exchange')
292 exchange = ui.configlist('devel', 'legacy.exchange')
294 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
293 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
295 return forcebundle1 or not op.remote.capable('bundle2')
294 return forcebundle1 or not op.remote.capable('bundle2')
296
295
class pushoperation(object):
    """State holder for a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # steps already performed
        # (used to check what steps have been already performed through
        # bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing
        #   pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the
        #   push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phase changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phase changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no specific target to push: every common head stays relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # We want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nodemap = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nodemap[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(ctx.node() for ctx in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        return self.futureheads if self.cgresult else self.fallbackheads

    # mapping of message used when pushing bookmark
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
423
422
424
423
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))

    # A local destination must support every feature this repo requires.
    if pushop.remote.local():
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # The source repo cannot be locked. Do not abort the push; just
        # disable the local phase synchronisation.
        pushop.ui.debug('cannot lock source repository: %s\n' % err)

    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
489
488
# list of steps to perform discovery before push
# (populated by the @pushdiscovery decorator, in registration order)
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
497
496
def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The decorated function is recorded in the step -> function mapping
    and its name appended to the ordered list of steps. Beware that
    decorated functions are registered in decoration order (this may
    matter).

    Only use this decorator for brand new steps; to wrap a step defined
    by another extension, modify the pushdiscovery dictionary directly."""
    def register(func):
        # refuse double registration: wrapping must go through the mapping
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register
513
512
def _pushdiscovery(pushop):
    """Run every registered discovery step, in registration order"""
    for stepname in pushdiscoveryorder:
        pushdiscoverymapping[stepname](pushop)
519
518
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed

    Stores the outgoing set, the remote heads and the incoming flag on
    ``pushop`` for later steps to consume."""
    fci = discovery.findcommonincoming
    if pushop.revs:
        # limit common discovery to the ancestors of the requested revs
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
                        ancestorsof=pushop.revs)
    else:
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    # reuse the incoming result to avoid a second discovery round-trip
    outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
536
535
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)

    Results are stored as ``pushop.outdatedphases`` (used when the
    changeset push succeeds) and ``pushop.fallbackoutdatedphases`` (used
    when it fails or pushes nothing)."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(pushop.repo,
                                                     pushop.fallbackheads,
                                                     remotephases)
    droots = pushop.remotephases.draftroots

    extracond = ''
    if not pushop.remotephases.publishing:
        # non-publishing server: only advance heads that are public locally
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
588
587
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """discover the obsolescence markers relevant to the pushed heads

    Only runs when marker exchange is enabled, the local obsstore is
    non-empty and the remote advertises the 'obsolete' namespace."""
    if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
        and pushop.repo.obsstore
        and 'obsolete' in pushop.remote.listkeys('namespaces')):
        repo = pushop.repo
        # very naive computation, that can be quite expensive on big repo.
        # However: evolution is currently slow on them anyway.
        nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
        pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
599
598
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """discover the bookmark moves to send to the remote

    Fills ``pushop.outbookmarks`` with (name, old-remote-hex, new-hex)
    triples and sets ``pushop.bkresult`` to 2 when an explicitly
    requested bookmark exists on neither side."""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict implicit bookmark moves to the pushed subset
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks the user explicitly asked to push (names expanded)
    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # deleted side of a comparison is None; keep it that way
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    # bookmarks that simply advanced on the local side
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
664
663
def _pushcheckoutgoing(pushop):
    """Validate the outgoing changesets before pushing

    Returns False when there is nothing to push; aborts when a non-forced
    push would publish obsolete or unstable changesets; otherwise runs the
    head checks and returns True."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
699
698
# List of names of steps to perform for an outgoing bundle2, order matters.
# (populated by the @b2partsgenerator decorator)
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
707
706
def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The decorated function is recorded in the step -> function mapping
    and the step name inserted into the ordered step list: appended at
    the end, or placed at position ``idx`` when given. Registration
    order may matter.

    Only use this decorator for brand new steps; to wrap a step defined
    by another extension, modify the b2partsgenmapping dictionary
    directly."""
    def register(func):
        # double registration is a programming error, not a runtime one
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return register
726
725
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # legacy check: the full set of remote heads must be unchanged
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # finer check: only heads our push will affect must be unchanged
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
750
749
751 def _pushing(pushop):
750 def _pushing(pushop):
752 """return True if we are pushing anything"""
751 """return True if we are pushing anything"""
753 return bool(pushop.outgoing.missing
752 return bool(pushop.outgoing.missing
754 or pushop.outdatedphases
753 or pushop.outdatedphases
755 or pushop.outobsmarkers
754 or pushop.outobsmarkers
756 or pushop.outbookmarks)
755 or pushop.outbookmarks)
757
756
@b2partsgenerator('check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasbookmarkcheck = 'bookmarks' in b2caps
    if not (pushop.outbookmarks and hasbookmarkcheck):
        return
    # send the remote-side values we observed so the server can detect
    # a concurrent bookmark move (push race)
    data = []
    for book, old, new in pushop.outbookmarks:
        old = bin(old)
        data.append((book, old))
    checkdata = bookmod.binaryencode(data)
    bundler.newpart('check:bookmarks', data=checkdata)
773
772
@b2partsgenerator('check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = 'heads' in b2caps.get('phases', ())
    if pushop.remotephases is not None and hasphaseheads:
        # check that the remote phase has not changed
        checks = [[] for p in phases.allphases]
        checks[phases.public].extend(pushop.remotephases.publicheads)
        checks[phases.draft].extend(pushop.remotephases.draftroots)
        if any(checks):
            for nodes in checks:
                # deterministic part content
                nodes.sort()
            checkdata = phases.binaryencode(checks)
            bundler.newpart('check:phases', data=checkdata)
791
790
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
831
830
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Prefers the binary 'phase-heads' part when the server supports it and
    legacy mode is not forced; otherwise falls back to pushkey parts."""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
    haspushkey = 'pushkey' in b2caps
    hasphaseheads = 'heads' in b2caps.get('phases', ())

    if hasphaseheads and not legacyphase:
        return _pushb2phaseheads(pushop, bundler)
    elif haspushkey:
        return _pushb2phasespushkey(pushop, bundler)
848
847
def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part"""
    pushop.stepsdone.add('phases')
    if pushop.outdatedphases:
        # index 0 is phases.public: all outdated heads become public
        updates = [[] for p in phases.allphases]
        updates[0].extend(h.node() for h in pushop.outdatedphases)
        phasedata = phases.binaryencode(updates)
        bundler.newpart('phase-heads', data=phasedata)
857
856
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part

    One pushkey part is emitted per outdated head; the returned reply
    handler reports any head the server refused or ignored."""
    pushop.stepsdone.add('phases')
    part2node = []

    def handlefailure(pushop, exc):
        # map the failing part id back to the node it was moving
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
892
891
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsmarkers part when a marker format is shared with the remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no common marker format: leave the step for another mechanism
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        markers = sorted(pushop.outobsmarkers)
        bundle2.buildobsmarkerspart(bundler, markers)
904
903
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Prefers the binary 'bookmarks' part when supported and legacy mode is
    not forced; otherwise falls back to pushkey parts."""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)

    legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
    legacybooks = 'bookmarks' in legacy

    if not legacybooks and 'bookmarks' in b2caps:
        return _pushb2bookmarkspart(pushop, bundler)
    elif 'pushkey' in b2caps:
        return _pushb2bookmarkspushkey(pushop, bundler)
919
918
920 def _bmaction(old, new):
919 def _bmaction(old, new):
921 """small utility for bookmark pushing"""
920 """small utility for bookmark pushing"""
922 if not old:
921 if not old:
923 return 'export'
922 return 'export'
924 elif not new:
923 elif not new:
925 return 'delete'
924 return 'delete'
926 return 'update'
925 return 'update'
927
926
def _pushb2bookmarkspart(pushop, bundler):
    """push bookmarks through a native bundle2 'bookmarks' part

    Encodes every outgoing bookmark move into one binary part and returns
    a reply handler that reports each move once the bundle succeeds.
    """
    pushop.stepsdone.add('bookmarks')
    if not pushop.outbookmarks:
        return

    allactions = []
    records = []
    for bookmark, oldnode, newhex in pushop.outbookmarks:
        newnode = bin(newhex)
        records.append((bookmark, newnode))
        allactions.append((bookmark, _bmaction(oldnode, newnode)))
    bundler.newpart('bookmarks', data=bookmod.binaryencode(records))

    def handlereply(op):
        # the part is applied as a whole; reaching here means success
        ui = pushop.ui
        for bookmark, action in allactions:
            ui.status(bookmsgmap[action][0] % bookmark)

    return handlereply
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmarks through bundle2 using one 'pushkey' part per bookmark

    Fallback used when the remote lacks the native 'bookmarks' part but
    advertises 'pushkey'. Registers a failure callback per part and returns
    a reply handler reporting per-bookmark success or failure.
    """
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # map the failing part id back to the bookmark it carried
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # classify via the shared helper, keeping messages consistent with
        # _pushb2bookmarkspart instead of re-deriving the action inline
        action = _bmaction(old, new)
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    pushvars = pushop.pushvars
    if not pushvars:
        # nothing requested on the command line; add no part
        return

    shellvars = {}
    for raw in pushvars:
        if '=' not in raw:
            msg = ("unable to parse variable '%s', should follow "
                   "'KEY=VALUE' or 'KEY=' format")
            raise error.Abort(msg % raw)
        # split only on the first '=' so values may themselves contain '='
        key, value = raw.split('=', 1)
        shellvars[key] = value

    part = bundler.newpart('pushvars')

    # advisory params: an older server simply ignores them
    for key, value in shellvars.iteritems():
        part.addparam(key, value, mandatory=False)
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback (server sending data back in the reply bundle) needs an open
    # transaction manager to apply it into
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback,
                                                      role='client'))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # each registered part generator may add parts to the bundle; a callable
    # return value is a handler to run on the server's reply
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    # ('replycaps' is always present, so <= 1 means no generator added a part)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            # server rejected a mandatory part/parameter it does not know
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            # apply the server's reply bundle locally (and any pushback data)
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # server-side abort carried inside the reply bundle
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        # delegate to the failure callback registered for that part, if any
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    pusheseverything = (pushop.revs is None
                        and not (outgoing.excluded
                                 or pushop.repo.changelog.filteredrevs))
    if pusheseverything:
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    remoteheads = ['force'] if pushop.force else pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        # split remote phase roots into public heads and draft roots
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            # advance public up to the remote's public heads, draft for the
            # remaining common heads
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            # nothing was pushed; use the precomputed fallback set
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      ('%d' % phases.draft),
                                      ('%d' % phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase, nodes)
        return
    # repo is not locked, do not change any phases!
    # Informs the user that phases should have been moved when
    # applicable.
    wouldmove = [n for n in nodes if phase < pushop.repo[n].phase()]
    if wouldmove:
        phasestr = phases.phasenames[phase]
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    allok = True
    # reverse sort to ensure we end with dump0
    for key in sorted(remotedata, reverse=True):
        if not remote.pushkey('obsolete', key, '', remotedata[key]):
            allok = False
    if not allok:
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Legacy (non-bundle2) bookmark push: one 'bookmarks' pushkey call per
    outgoing bookmark. Skipped when the changegroup push failed or when
    bookmarks were already handled through bundle2.
    """
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # classify via the shared helper, keeping the reported action
        # consistent with the bundle2 bookmark push paths
        action = _bmaction(old, new)
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
class pulloperation(object):
    """A object that represent a single pull operation

    It purpose is to carry pull related state and very common operation.

    A new should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        """Initialize pull state; most attributes are filled in later by the
        discovery and pull steps."""
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            # (common heads plus any remote heads not already common)
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # bundle2 is the default unless explicitly disabled/forced off
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # bundle2 capabilities advertised by the remote peer
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # the underlying transaction, created lazily by transaction()
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def release(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           **pycompat.strkwargs(opargs))

    # refuse to pull from a repo whose requirements we do not support
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
    # wlock before lock (standard lock ordering); trmanager closes/releases
    # the transaction on context exit
    with repo.wlock(), repo.lock(), pullop.trmanager:
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        streamclone.maybeperformlegacystreamclone(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # each step checks pullop.stepsdone, so these are no-ops for
        # anything bundle2 already handled
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)

    # storing remotenames
    if repo.ui.configbool('experimental', 'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pullop
# ordered list of step names to perform discovery before pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}

def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The decorated function is registered under ``stepname`` in
    ``pulldiscoverymapping`` and ``stepname`` is appended to
    ``pulldiscoveryorder``; registration order therefore determines
    execution order (this may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""
    def register(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1400 def _pulldiscovery(pullop):
1399 def _pulldiscovery(pullop):
1401 """Run all discovery steps"""
1400 """Run all discovery steps"""
1402 for stepname in pulldiscoveryorder:
1401 for stepname in pulldiscoveryorder:
1403 step = pulldiscoverymapping[stepname]
1402 step = pulldiscoverymapping[stepname]
1404 step(pullop)
1403 step(pullop)
1405
1404
1406 @pulldiscovery('b1:bookmarks')
1405 @pulldiscovery('b1:bookmarks')
1407 def _pullbookmarkbundle1(pullop):
1406 def _pullbookmarkbundle1(pullop):
1408 """fetch bookmark data in bundle1 case
1407 """fetch bookmark data in bundle1 case
1409
1408
1410 If not using bundle2, we have to fetch bookmarks before changeset
1409 If not using bundle2, we have to fetch bookmarks before changeset
1411 discovery to reduce the chance and impact of race conditions."""
1410 discovery to reduce the chance and impact of race conditions."""
1412 if pullop.remotebookmarks is not None:
1411 if pullop.remotebookmarks is not None:
1413 return
1412 return
1414 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1413 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1415 # all known bundle2 servers now support listkeys, but lets be nice with
1414 # all known bundle2 servers now support listkeys, but lets be nice with
1416 # new implementation.
1415 # new implementation.
1417 return
1416 return
1418 books = pullop.remote.listkeys('bookmarks')
1417 books = pullop.remote.listkeys('bookmarks')
1419 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1418 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1420
1419
1421
1420
1422 @pulldiscovery('changegroup')
1421 @pulldiscovery('changegroup')
1423 def _pulldiscoverychangegroup(pullop):
1422 def _pulldiscoverychangegroup(pullop):
1424 """discovery phase for the pull
1423 """discovery phase for the pull
1425
1424
1426 Current handle changeset discovery only, will change handle all discovery
1425 Current handle changeset discovery only, will change handle all discovery
1427 at some point."""
1426 at some point."""
1428 tmp = discovery.findcommonincoming(pullop.repo,
1427 tmp = discovery.findcommonincoming(pullop.repo,
1429 pullop.remote,
1428 pullop.remote,
1430 heads=pullop.heads,
1429 heads=pullop.heads,
1431 force=pullop.force)
1430 force=pullop.force)
1432 common, fetch, rheads = tmp
1431 common, fetch, rheads = tmp
1433 nm = pullop.repo.unfiltered().changelog.nodemap
1432 nm = pullop.repo.unfiltered().changelog.nodemap
1434 if fetch and rheads:
1433 if fetch and rheads:
1435 # If a remote heads is filtered locally, put in back in common.
1434 # If a remote heads is filtered locally, put in back in common.
1436 #
1435 #
1437 # This is a hackish solution to catch most of "common but locally
1436 # This is a hackish solution to catch most of "common but locally
1438 # hidden situation". We do not performs discovery on unfiltered
1437 # hidden situation". We do not performs discovery on unfiltered
1439 # repository because it end up doing a pathological amount of round
1438 # repository because it end up doing a pathological amount of round
1440 # trip for w huge amount of changeset we do not care about.
1439 # trip for w huge amount of changeset we do not care about.
1441 #
1440 #
1442 # If a set of such "common but filtered" changeset exist on the server
1441 # If a set of such "common but filtered" changeset exist on the server
1443 # but are not including a remote heads, we'll not be able to detect it,
1442 # but are not including a remote heads, we'll not be able to detect it,
1444 scommon = set(common)
1443 scommon = set(common)
1445 for n in rheads:
1444 for n in rheads:
1446 if n in nm:
1445 if n in nm:
1447 if n not in scommon:
1446 if n not in scommon:
1448 common.append(n)
1447 common.append(n)
1449 if set(rheads).issubset(set(common)):
1448 if set(rheads).issubset(set(common)):
1450 fetch = []
1449 fetch = []
1451 pullop.common = common
1450 pullop.common = common
1452 pullop.fetch = fetch
1451 pullop.fetch = fetch
1453 pullop.rheads = rheads
1452 pullop.rheads = rheads
1454
1453
1455 def _pullbundle2(pullop):
1454 def _pullbundle2(pullop):
1456 """pull data using bundle2
1455 """pull data using bundle2
1457
1456
1458 For now, the only supported data are changegroup."""
1457 For now, the only supported data are changegroup."""
1459 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1458 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1460
1459
1461 # make ui easier to access
1460 # make ui easier to access
1462 ui = pullop.repo.ui
1461 ui = pullop.repo.ui
1463
1462
1464 # At the moment we don't do stream clones over bundle2. If that is
1463 # At the moment we don't do stream clones over bundle2. If that is
1465 # implemented then here's where the check for that will go.
1464 # implemented then here's where the check for that will go.
1466 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1465 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1467
1466
1468 # declare pull perimeters
1467 # declare pull perimeters
1469 kwargs['common'] = pullop.common
1468 kwargs['common'] = pullop.common
1470 kwargs['heads'] = pullop.heads or pullop.rheads
1469 kwargs['heads'] = pullop.heads or pullop.rheads
1471
1470
1472 if streaming:
1471 if streaming:
1473 kwargs['cg'] = False
1472 kwargs['cg'] = False
1474 kwargs['stream'] = True
1473 kwargs['stream'] = True
1475 pullop.stepsdone.add('changegroup')
1474 pullop.stepsdone.add('changegroup')
1476 pullop.stepsdone.add('phases')
1475 pullop.stepsdone.add('phases')
1477
1476
1478 else:
1477 else:
1479 # pulling changegroup
1478 # pulling changegroup
1480 pullop.stepsdone.add('changegroup')
1479 pullop.stepsdone.add('changegroup')
1481
1480
1482 kwargs['cg'] = pullop.fetch
1481 kwargs['cg'] = pullop.fetch
1483
1482
1484 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1483 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1485 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1484 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1486 if (not legacyphase and hasbinaryphase):
1485 if (not legacyphase and hasbinaryphase):
1487 kwargs['phases'] = True
1486 kwargs['phases'] = True
1488 pullop.stepsdone.add('phases')
1487 pullop.stepsdone.add('phases')
1489
1488
1490 if 'listkeys' in pullop.remotebundle2caps:
1489 if 'listkeys' in pullop.remotebundle2caps:
1491 if 'phases' not in pullop.stepsdone:
1490 if 'phases' not in pullop.stepsdone:
1492 kwargs['listkeys'] = ['phases']
1491 kwargs['listkeys'] = ['phases']
1493
1492
1494 bookmarksrequested = False
1493 bookmarksrequested = False
1495 legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
1494 legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
1496 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1495 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1497
1496
1498 if pullop.remotebookmarks is not None:
1497 if pullop.remotebookmarks is not None:
1499 pullop.stepsdone.add('request-bookmarks')
1498 pullop.stepsdone.add('request-bookmarks')
1500
1499
1501 if ('request-bookmarks' not in pullop.stepsdone
1500 if ('request-bookmarks' not in pullop.stepsdone
1502 and pullop.remotebookmarks is None
1501 and pullop.remotebookmarks is None
1503 and not legacybookmark and hasbinarybook):
1502 and not legacybookmark and hasbinarybook):
1504 kwargs['bookmarks'] = True
1503 kwargs['bookmarks'] = True
1505 bookmarksrequested = True
1504 bookmarksrequested = True
1506
1505
1507 if 'listkeys' in pullop.remotebundle2caps:
1506 if 'listkeys' in pullop.remotebundle2caps:
1508 if 'request-bookmarks' not in pullop.stepsdone:
1507 if 'request-bookmarks' not in pullop.stepsdone:
1509 # make sure to always includes bookmark data when migrating
1508 # make sure to always includes bookmark data when migrating
1510 # `hg incoming --bundle` to using this function.
1509 # `hg incoming --bundle` to using this function.
1511 pullop.stepsdone.add('request-bookmarks')
1510 pullop.stepsdone.add('request-bookmarks')
1512 kwargs.setdefault('listkeys', []).append('bookmarks')
1511 kwargs.setdefault('listkeys', []).append('bookmarks')
1513
1512
1514 # If this is a full pull / clone and the server supports the clone bundles
1513 # If this is a full pull / clone and the server supports the clone bundles
1515 # feature, tell the server whether we attempted a clone bundle. The
1514 # feature, tell the server whether we attempted a clone bundle. The
1516 # presence of this flag indicates the client supports clone bundles. This
1515 # presence of this flag indicates the client supports clone bundles. This
1517 # will enable the server to treat clients that support clone bundles
1516 # will enable the server to treat clients that support clone bundles
1518 # differently from those that don't.
1517 # differently from those that don't.
1519 if (pullop.remote.capable('clonebundles')
1518 if (pullop.remote.capable('clonebundles')
1520 and pullop.heads is None and list(pullop.common) == [nullid]):
1519 and pullop.heads is None and list(pullop.common) == [nullid]):
1521 kwargs['cbattempted'] = pullop.clonebundleattempted
1520 kwargs['cbattempted'] = pullop.clonebundleattempted
1522
1521
1523 if streaming:
1522 if streaming:
1524 pullop.repo.ui.status(_('streaming all changes\n'))
1523 pullop.repo.ui.status(_('streaming all changes\n'))
1525 elif not pullop.fetch:
1524 elif not pullop.fetch:
1526 pullop.repo.ui.status(_("no changes found\n"))
1525 pullop.repo.ui.status(_("no changes found\n"))
1527 pullop.cgresult = 0
1526 pullop.cgresult = 0
1528 else:
1527 else:
1529 if pullop.heads is None and list(pullop.common) == [nullid]:
1528 if pullop.heads is None and list(pullop.common) == [nullid]:
1530 pullop.repo.ui.status(_("requesting all changes\n"))
1529 pullop.repo.ui.status(_("requesting all changes\n"))
1531 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1530 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1532 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1531 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1533 if obsolete.commonversion(remoteversions) is not None:
1532 if obsolete.commonversion(remoteversions) is not None:
1534 kwargs['obsmarkers'] = True
1533 kwargs['obsmarkers'] = True
1535 pullop.stepsdone.add('obsmarkers')
1534 pullop.stepsdone.add('obsmarkers')
1536 _pullbundle2extraprepare(pullop, kwargs)
1535 _pullbundle2extraprepare(pullop, kwargs)
1537 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1536 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1538 try:
1537 try:
1539 op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction)
1538 op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction)
1540 op.modes['bookmarks'] = 'records'
1539 op.modes['bookmarks'] = 'records'
1541 bundle2.processbundle(pullop.repo, bundle, op=op)
1540 bundle2.processbundle(pullop.repo, bundle, op=op)
1542 except bundle2.AbortFromPart as exc:
1541 except bundle2.AbortFromPart as exc:
1543 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1542 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1544 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1543 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1545 except error.BundleValueError as exc:
1544 except error.BundleValueError as exc:
1546 raise error.Abort(_('missing support for %s') % exc)
1545 raise error.Abort(_('missing support for %s') % exc)
1547
1546
1548 if pullop.fetch:
1547 if pullop.fetch:
1549 pullop.cgresult = bundle2.combinechangegroupresults(op)
1548 pullop.cgresult = bundle2.combinechangegroupresults(op)
1550
1549
1551 # processing phases change
1550 # processing phases change
1552 for namespace, value in op.records['listkeys']:
1551 for namespace, value in op.records['listkeys']:
1553 if namespace == 'phases':
1552 if namespace == 'phases':
1554 _pullapplyphases(pullop, value)
1553 _pullapplyphases(pullop, value)
1555
1554
1556 # processing bookmark update
1555 # processing bookmark update
1557 if bookmarksrequested:
1556 if bookmarksrequested:
1558 books = {}
1557 books = {}
1559 for record in op.records['bookmarks']:
1558 for record in op.records['bookmarks']:
1560 books[record['bookmark']] = record["node"]
1559 books[record['bookmark']] = record["node"]
1561 pullop.remotebookmarks = books
1560 pullop.remotebookmarks = books
1562 else:
1561 else:
1563 for namespace, value in op.records['listkeys']:
1562 for namespace, value in op.records['listkeys']:
1564 if namespace == 'bookmarks':
1563 if namespace == 'bookmarks':
1565 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
1564 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
1566
1565
1567 # bookmark data were either already there or pulled in the bundle
1566 # bookmark data were either already there or pulled in the bundle
1568 if pullop.remotebookmarks is not None:
1567 if pullop.remotebookmarks is not None:
1569 _pullbookmarks(pullop)
1568 _pullbookmarks(pullop)
1570
1569
1571 def _pullbundle2extraprepare(pullop, kwargs):
1570 def _pullbundle2extraprepare(pullop, kwargs):
1572 """hook function so that extensions can extend the getbundle call"""
1571 """hook function so that extensions can extend the getbundle call"""
1573
1572
1574 def _pullchangeset(pullop):
1573 def _pullchangeset(pullop):
1575 """pull changeset from unbundle into the local repo"""
1574 """pull changeset from unbundle into the local repo"""
1576 # We delay the open of the transaction as late as possible so we
1575 # We delay the open of the transaction as late as possible so we
1577 # don't open transaction for nothing or you break future useful
1576 # don't open transaction for nothing or you break future useful
1578 # rollback call
1577 # rollback call
1579 if 'changegroup' in pullop.stepsdone:
1578 if 'changegroup' in pullop.stepsdone:
1580 return
1579 return
1581 pullop.stepsdone.add('changegroup')
1580 pullop.stepsdone.add('changegroup')
1582 if not pullop.fetch:
1581 if not pullop.fetch:
1583 pullop.repo.ui.status(_("no changes found\n"))
1582 pullop.repo.ui.status(_("no changes found\n"))
1584 pullop.cgresult = 0
1583 pullop.cgresult = 0
1585 return
1584 return
1586 tr = pullop.gettransaction()
1585 tr = pullop.gettransaction()
1587 if pullop.heads is None and list(pullop.common) == [nullid]:
1586 if pullop.heads is None and list(pullop.common) == [nullid]:
1588 pullop.repo.ui.status(_("requesting all changes\n"))
1587 pullop.repo.ui.status(_("requesting all changes\n"))
1589 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1588 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1590 # issue1320, avoid a race if remote changed after discovery
1589 # issue1320, avoid a race if remote changed after discovery
1591 pullop.heads = pullop.rheads
1590 pullop.heads = pullop.rheads
1592
1591
1593 if pullop.remote.capable('getbundle'):
1592 if pullop.remote.capable('getbundle'):
1594 # TODO: get bundlecaps from remote
1593 # TODO: get bundlecaps from remote
1595 cg = pullop.remote.getbundle('pull', common=pullop.common,
1594 cg = pullop.remote.getbundle('pull', common=pullop.common,
1596 heads=pullop.heads or pullop.rheads)
1595 heads=pullop.heads or pullop.rheads)
1597 elif pullop.heads is None:
1596 elif pullop.heads is None:
1598 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1597 cg = pullop.remote.changegroup(pullop.fetch, 'pull')
1599 elif not pullop.remote.capable('changegroupsubset'):
1598 elif not pullop.remote.capable('changegroupsubset'):
1600 raise error.Abort(_("partial pull cannot be done because "
1599 raise error.Abort(_("partial pull cannot be done because "
1601 "other repository doesn't support "
1600 "other repository doesn't support "
1602 "changegroupsubset."))
1601 "changegroupsubset."))
1603 else:
1602 else:
1604 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1603 cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
1605 bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
1604 bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
1606 pullop.remote.url())
1605 pullop.remote.url())
1607 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1606 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1608
1607
1609 def _pullphase(pullop):
1608 def _pullphase(pullop):
1610 # Get remote phases data from remote
1609 # Get remote phases data from remote
1611 if 'phases' in pullop.stepsdone:
1610 if 'phases' in pullop.stepsdone:
1612 return
1611 return
1613 remotephases = pullop.remote.listkeys('phases')
1612 remotephases = pullop.remote.listkeys('phases')
1614 _pullapplyphases(pullop, remotephases)
1613 _pullapplyphases(pullop, remotephases)
1615
1614
1616 def _pullapplyphases(pullop, remotephases):
1615 def _pullapplyphases(pullop, remotephases):
1617 """apply phase movement from observed remote state"""
1616 """apply phase movement from observed remote state"""
1618 if 'phases' in pullop.stepsdone:
1617 if 'phases' in pullop.stepsdone:
1619 return
1618 return
1620 pullop.stepsdone.add('phases')
1619 pullop.stepsdone.add('phases')
1621 publishing = bool(remotephases.get('publishing', False))
1620 publishing = bool(remotephases.get('publishing', False))
1622 if remotephases and not publishing:
1621 if remotephases and not publishing:
1623 # remote is new and non-publishing
1622 # remote is new and non-publishing
1624 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1623 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1625 pullop.pulledsubset,
1624 pullop.pulledsubset,
1626 remotephases)
1625 remotephases)
1627 dheads = pullop.pulledsubset
1626 dheads = pullop.pulledsubset
1628 else:
1627 else:
1629 # Remote is old or publishing all common changesets
1628 # Remote is old or publishing all common changesets
1630 # should be seen as public
1629 # should be seen as public
1631 pheads = pullop.pulledsubset
1630 pheads = pullop.pulledsubset
1632 dheads = []
1631 dheads = []
1633 unfi = pullop.repo.unfiltered()
1632 unfi = pullop.repo.unfiltered()
1634 phase = unfi._phasecache.phase
1633 phase = unfi._phasecache.phase
1635 rev = unfi.changelog.nodemap.get
1634 rev = unfi.changelog.nodemap.get
1636 public = phases.public
1635 public = phases.public
1637 draft = phases.draft
1636 draft = phases.draft
1638
1637
1639 # exclude changesets already public locally and update the others
1638 # exclude changesets already public locally and update the others
1640 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1639 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1641 if pheads:
1640 if pheads:
1642 tr = pullop.gettransaction()
1641 tr = pullop.gettransaction()
1643 phases.advanceboundary(pullop.repo, tr, public, pheads)
1642 phases.advanceboundary(pullop.repo, tr, public, pheads)
1644
1643
1645 # exclude changesets already draft locally and update the others
1644 # exclude changesets already draft locally and update the others
1646 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1645 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1647 if dheads:
1646 if dheads:
1648 tr = pullop.gettransaction()
1647 tr = pullop.gettransaction()
1649 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1648 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1650
1649
1651 def _pullbookmarks(pullop):
1650 def _pullbookmarks(pullop):
1652 """process the remote bookmark information to update the local one"""
1651 """process the remote bookmark information to update the local one"""
1653 if 'bookmarks' in pullop.stepsdone:
1652 if 'bookmarks' in pullop.stepsdone:
1654 return
1653 return
1655 pullop.stepsdone.add('bookmarks')
1654 pullop.stepsdone.add('bookmarks')
1656 repo = pullop.repo
1655 repo = pullop.repo
1657 remotebookmarks = pullop.remotebookmarks
1656 remotebookmarks = pullop.remotebookmarks
1658 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1657 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1659 pullop.remote.url(),
1658 pullop.remote.url(),
1660 pullop.gettransaction,
1659 pullop.gettransaction,
1661 explicit=pullop.explicitbookmarks)
1660 explicit=pullop.explicitbookmarks)
1662
1661
1663 def _pullobsolete(pullop):
1662 def _pullobsolete(pullop):
1664 """utility function to pull obsolete markers from a remote
1663 """utility function to pull obsolete markers from a remote
1665
1664
1666 The `gettransaction` is function that return the pull transaction, creating
1665 The `gettransaction` is function that return the pull transaction, creating
1667 one if necessary. We return the transaction to inform the calling code that
1666 one if necessary. We return the transaction to inform the calling code that
1668 a new transaction have been created (when applicable).
1667 a new transaction have been created (when applicable).
1669
1668
1670 Exists mostly to allow overriding for experimentation purpose"""
1669 Exists mostly to allow overriding for experimentation purpose"""
1671 if 'obsmarkers' in pullop.stepsdone:
1670 if 'obsmarkers' in pullop.stepsdone:
1672 return
1671 return
1673 pullop.stepsdone.add('obsmarkers')
1672 pullop.stepsdone.add('obsmarkers')
1674 tr = None
1673 tr = None
1675 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1674 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1676 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1675 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1677 remoteobs = pullop.remote.listkeys('obsolete')
1676 remoteobs = pullop.remote.listkeys('obsolete')
1678 if 'dump0' in remoteobs:
1677 if 'dump0' in remoteobs:
1679 tr = pullop.gettransaction()
1678 tr = pullop.gettransaction()
1680 markers = []
1679 markers = []
1681 for key in sorted(remoteobs, reverse=True):
1680 for key in sorted(remoteobs, reverse=True):
1682 if key.startswith('dump'):
1681 if key.startswith('dump'):
1683 data = util.b85decode(remoteobs[key])
1682 data = util.b85decode(remoteobs[key])
1684 version, newmarks = obsolete._readmarkers(data)
1683 version, newmarks = obsolete._readmarkers(data)
1685 markers += newmarks
1684 markers += newmarks
1686 if markers:
1685 if markers:
1687 pullop.repo.obsstore.add(tr, markers)
1686 pullop.repo.obsstore.add(tr, markers)
1688 pullop.repo.invalidatevolatilesets()
1687 pullop.repo.invalidatevolatilesets()
1689 return tr
1688 return tr
1690
1689
1691 def caps20to10(repo, role):
1690 def caps20to10(repo, role):
1692 """return a set with appropriate options to use bundle20 during getbundle"""
1691 """return a set with appropriate options to use bundle20 during getbundle"""
1693 caps = {'HG20'}
1692 caps = {'HG20'}
1694 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
1693 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
1695 caps.add('bundle2=' + urlreq.quote(capsblob))
1694 caps.add('bundle2=' + urlreq.quote(capsblob))
1696 return caps
1695 return caps
1697
1696
1698 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1697 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1699 getbundle2partsorder = []
1698 getbundle2partsorder = []
1700
1699
1701 # Mapping between step name and function
1700 # Mapping between step name and function
1702 #
1701 #
1703 # This exists to help extensions wrap steps if necessary
1702 # This exists to help extensions wrap steps if necessary
1704 getbundle2partsmapping = {}
1703 getbundle2partsmapping = {}
1705
1704
1706 def getbundle2partsgenerator(stepname, idx=None):
1705 def getbundle2partsgenerator(stepname, idx=None):
1707 """decorator for function generating bundle2 part for getbundle
1706 """decorator for function generating bundle2 part for getbundle
1708
1707
1709 The function is added to the step -> function mapping and appended to the
1708 The function is added to the step -> function mapping and appended to the
1710 list of steps. Beware that decorated functions will be added in order
1709 list of steps. Beware that decorated functions will be added in order
1711 (this may matter).
1710 (this may matter).
1712
1711
1713 You can only use this decorator for new steps, if you want to wrap a step
1712 You can only use this decorator for new steps, if you want to wrap a step
1714 from an extension, attack the getbundle2partsmapping dictionary directly."""
1713 from an extension, attack the getbundle2partsmapping dictionary directly."""
1715 def dec(func):
1714 def dec(func):
1716 assert stepname not in getbundle2partsmapping
1715 assert stepname not in getbundle2partsmapping
1717 getbundle2partsmapping[stepname] = func
1716 getbundle2partsmapping[stepname] = func
1718 if idx is None:
1717 if idx is None:
1719 getbundle2partsorder.append(stepname)
1718 getbundle2partsorder.append(stepname)
1720 else:
1719 else:
1721 getbundle2partsorder.insert(idx, stepname)
1720 getbundle2partsorder.insert(idx, stepname)
1722 return func
1721 return func
1723 return dec
1722 return dec
1724
1723
1725 def bundle2requested(bundlecaps):
1724 def bundle2requested(bundlecaps):
1726 if bundlecaps is not None:
1725 if bundlecaps is not None:
1727 return any(cap.startswith('HG2') for cap in bundlecaps)
1726 return any(cap.startswith('HG2') for cap in bundlecaps)
1728 return False
1727 return False
1729
1728
1730 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1729 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1731 **kwargs):
1730 **kwargs):
1732 """Return chunks constituting a bundle's raw data.
1731 """Return chunks constituting a bundle's raw data.
1733
1732
1734 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1733 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1735 passed.
1734 passed.
1736
1735
1737 Returns a 2-tuple of a dict with metadata about the generated bundle
1736 Returns a 2-tuple of a dict with metadata about the generated bundle
1738 and an iterator over raw chunks (of varying sizes).
1737 and an iterator over raw chunks (of varying sizes).
1739 """
1738 """
1740 kwargs = pycompat.byteskwargs(kwargs)
1739 kwargs = pycompat.byteskwargs(kwargs)
1741 info = {}
1740 info = {}
1742 usebundle2 = bundle2requested(bundlecaps)
1741 usebundle2 = bundle2requested(bundlecaps)
1743 # bundle10 case
1742 # bundle10 case
1744 if not usebundle2:
1743 if not usebundle2:
1745 if bundlecaps and not kwargs.get('cg', True):
1744 if bundlecaps and not kwargs.get('cg', True):
1746 raise ValueError(_('request for bundle10 must include changegroup'))
1745 raise ValueError(_('request for bundle10 must include changegroup'))
1747
1746
1748 if kwargs:
1747 if kwargs:
1749 raise ValueError(_('unsupported getbundle arguments: %s')
1748 raise ValueError(_('unsupported getbundle arguments: %s')
1750 % ', '.join(sorted(kwargs.keys())))
1749 % ', '.join(sorted(kwargs.keys())))
1751 outgoing = _computeoutgoing(repo, heads, common)
1750 outgoing = _computeoutgoing(repo, heads, common)
1752 info['bundleversion'] = 1
1751 info['bundleversion'] = 1
1753 return info, changegroup.makestream(repo, outgoing, '01', source,
1752 return info, changegroup.makestream(repo, outgoing, '01', source,
1754 bundlecaps=bundlecaps)
1753 bundlecaps=bundlecaps)
1755
1754
1756 # bundle20 case
1755 # bundle20 case
1757 info['bundleversion'] = 2
1756 info['bundleversion'] = 2
1758 b2caps = {}
1757 b2caps = {}
1759 for bcaps in bundlecaps:
1758 for bcaps in bundlecaps:
1760 if bcaps.startswith('bundle2='):
1759 if bcaps.startswith('bundle2='):
1761 blob = urlreq.unquote(bcaps[len('bundle2='):])
1760 blob = urlreq.unquote(bcaps[len('bundle2='):])
1762 b2caps.update(bundle2.decodecaps(blob))
1761 b2caps.update(bundle2.decodecaps(blob))
1763 bundler = bundle2.bundle20(repo.ui, b2caps)
1762 bundler = bundle2.bundle20(repo.ui, b2caps)
1764
1763
1765 kwargs['heads'] = heads
1764 kwargs['heads'] = heads
1766 kwargs['common'] = common
1765 kwargs['common'] = common
1767
1766
1768 for name in getbundle2partsorder:
1767 for name in getbundle2partsorder:
1769 func = getbundle2partsmapping[name]
1768 func = getbundle2partsmapping[name]
1770 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1769 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1771 **pycompat.strkwargs(kwargs))
1770 **pycompat.strkwargs(kwargs))
1772
1771
1773 info['prefercompressed'] = bundler.prefercompressed
1772 info['prefercompressed'] = bundler.prefercompressed
1774
1773
1775 return info, bundler.getchunks()
1774 return info, bundler.getchunks()
1776
1775
1777 @getbundle2partsgenerator('stream2')
1776 @getbundle2partsgenerator('stream2')
1778 def _getbundlestream2(bundler, repo, source, bundlecaps=None,
1777 def _getbundlestream2(bundler, repo, source, bundlecaps=None,
1779 b2caps=None, heads=None, common=None, **kwargs):
1778 b2caps=None, heads=None, common=None, **kwargs):
1780 if not kwargs.get('stream', False):
1779 if not kwargs.get('stream', False):
1781 return
1780 return
1782
1781
1783 if not streamclone.allowservergeneration(repo):
1782 if not streamclone.allowservergeneration(repo):
1784 raise error.Abort(_('stream data requested but server does not allow '
1783 raise error.Abort(_('stream data requested but server does not allow '
1785 'this feature'),
1784 'this feature'),
1786 hint=_('well-behaved clients should not be '
1785 hint=_('well-behaved clients should not be '
1787 'requesting stream data from servers not '
1786 'requesting stream data from servers not '
1788 'advertising it; the client may be buggy'))
1787 'advertising it; the client may be buggy'))
1789
1788
1790 # Stream clones don't compress well. And compression undermines a
1789 # Stream clones don't compress well. And compression undermines a
1791 # goal of stream clones, which is to be fast. Communicate the desire
1790 # goal of stream clones, which is to be fast. Communicate the desire
1792 # to avoid compression to consumers of the bundle.
1791 # to avoid compression to consumers of the bundle.
1793 bundler.prefercompressed = False
1792 bundler.prefercompressed = False
1794
1793
1795 filecount, bytecount, it = streamclone.generatev2(repo)
1794 filecount, bytecount, it = streamclone.generatev2(repo)
1796 requirements = _formatrequirementsspec(repo.requirements)
1795 requirements = _formatrequirementsspec(repo.requirements)
1797 part = bundler.newpart('stream2', data=it)
1796 part = bundler.newpart('stream2', data=it)
1798 part.addparam('bytecount', '%d' % bytecount, mandatory=True)
1797 part.addparam('bytecount', '%d' % bytecount, mandatory=True)
1799 part.addparam('filecount', '%d' % filecount, mandatory=True)
1798 part.addparam('filecount', '%d' % filecount, mandatory=True)
1800 part.addparam('requirements', requirements, mandatory=True)
1799 part.addparam('requirements', requirements, mandatory=True)
1801
1800
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    # Nothing to do unless the caller actually asked for a changegroup.
    if not kwargs.get(r'cg', True):
        return

    # Negotiate the changegroup version: default to '01' unless the client
    # advertised versions that we can also produce.
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)

    outgoing = _computeoutgoing(repo, heads, common)
    if not outgoing.missing:
        # Nothing to transfer; emit no part at all.
        return

    cgstream = changegroup.makestream(repo, outgoing, version, source,
                                      bundlecaps=bundlecaps)
    part = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        part.addparam('version', version)
    part.addparam('nbchanges', '%d' % len(outgoing.missing),
                  mandatory=False)
    if 'treemanifest' in repo.requirements:
        part.addparam('treemanifest', '1')
1830
1829
@getbundle2partsgenerator('bookmarks')
def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
                           b2caps=None, **kwargs):
    """add a bookmark part to the requested bundle"""
    if not kwargs.get(r'bookmarks', False):
        return
    if 'bookmarks' not in b2caps:
        raise ValueError(_('no common bookmarks exchange method'))
    # Encode all bookmarks in the binary wire format; only emit a part
    # when there is actually something to send.
    data = bookmod.binaryencode(bookmod.listbinbookmarks(repo))
    if data:
        bundler.newpart('bookmarks', data=data)
1843
1842
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    # One 'listkeys' part is emitted per requested pushkey namespace.
    for namespace in kwargs.get(r'listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        part.data = pushkey.encodekeys(keys)
1854
1853
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get(r'obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # Only markers relevant to the ancestors of the exchanged heads are sent.
    subset = [c.node() for c in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1866
1865
@getbundle2partsgenerator('phases')
def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, **kwargs):
    """add phase heads part to the requested bundle"""
    if not kwargs.get(r'phases', False):
        return
    if 'heads' not in b2caps.get('phases'):
        raise ValueError(_('no common phases exchange method'))
    if heads is None:
        heads = repo.heads()

    headsbyphase = collections.defaultdict(set)
    if repo.publishing():
        # On a publishing repository every exchanged head is public.
        headsbyphase[phases.public] = heads
    else:
        # Bucket each requested head by its current phase.
        phase = repo._phasecache.phase
        node = repo.changelog.node
        rev = repo.changelog.rev
        for h in heads:
            headsbyphase[phase(repo, rev(h))].add(h)
        seenphases = list(headsbyphase.keys())

        # We do not handle anything but public and draft phases for now.
        if seenphases:
            assert max(seenphases) <= phases.draft

        # if client is pulling non-public changesets, we need to find
        # intermediate public heads.
        draftheads = headsbyphase.get(phases.draft, set())
        if draftheads:
            publicheads = headsbyphase.get(phases.public, set())

            revset = 'heads(only(%ln, %ln) and public())'
            extraheads = repo.revs(revset, draftheads, publicheads)
            for r in extraheads:
                headsbyphase[phases.public].add(node(r))

    # transform data in a format used by the encoding function
    phasemapping = [sorted(headsbyphase[phase])
                    for phase in phases.allphases]

    # generate the actual part
    phasedata = phases.binaryencode(phasemapping)
    bundler.newpart('phase-heads', data=phasedata)
1913
1912
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Don't send unless:
    # - changeset are being exchanged,
    # - the client supports it.
    if not kwargs.get(r'cg', True):
        return
    if 'hgtagsfnodes' not in b2caps:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1933
1932
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
    # A push is acceptable when the client forced it, or when the heads it
    # saw (either verbatim or as a hash) still match the repository.
    if (their_heads == ['force'] or their_heads == heads or
            their_heads == ['hashed', heads_hash]):
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
1947
1946
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call wil be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                # Lazily take the locks and open a transaction only once a
                # bundle2 part actually needs one; results are stashed in
                # lockandtr so the enclosing finally can release them.
                def gettransaction():
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    # Capture the reply bundle even when processing raised,
                    # so output produced so far can be forwarded to the
                    # client via the 'output' part below.
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # Tag the exception so upper layers know it happened during
                # bundle2 processing, and salvage any reply output so the
                # client still sees the server-side messages.
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        # Release in reverse acquisition order: transaction, lock, wlock.
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
2020
2019
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""

    repo = pullop.repo
    remote = pullop.remote

    # Feature must be enabled locally.
    if not repo.ui.configbool('ui', 'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    # A partial pull (explicit heads) is not a clone; bail out.
    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    manifestdata = remote._call('clonebundles')

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, manifestdata)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(
        repo, entries, streamclonerequested=pullop.streamclonerequested)

    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
2086
2085
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue
        # First field is the URL; the rest are KEY=VALUE attributes,
        # with both key and value percent-encoded.
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(repo, value,
                                                            externalnames=True)
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version
                except (error.InvalidBundleSpecification,
                        error.UnsupportedBundleSpecification):
                    # Malformed/unknown specs are kept as opaque attributes.
                    pass

        entries.append(attrs)

    return entries
2122
2121
def filterclonebundleentries(repo, entries, streamclonerequested=False):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    kept = []
    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                comp, version, params = parsebundlespec(repo, spec, strict=True)

                # If a stream clone was requested, filter out non-streamclone
                # entries.
                if streamclonerequested and (comp != 'UN' or version != 's1'):
                    repo.ui.debug('filtering %s because not a stream clone\n' %
                                  entry['URL'])
                    continue

            except error.InvalidBundleSpecification as e:
                repo.ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (
                                  entry['URL'], util.forcebytestr(e)))
                continue
        # If we don't have a spec and requested a stream clone, we don't know
        # what the entry is so don't attempt to apply it.
        elif streamclonerequested:
            repo.ui.debug('filtering %s because cannot determine if a stream '
                          'clone bundle\n' % entry['URL'])
            continue

        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        kept.append(entry)

    return kept
2170
2169
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        # value: the attribute dict for one manifest entry
        # prefers: ordered list of (key, preferred-value) pairs
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk the preference list in order; the first attribute that
        # distinguishes the two entries decides the ordering.
        for prefkey, prefvalue in self.prefers:
            ours = self.value.get(prefkey)
            theirs = other.value.get(prefkey)

            # Only one side has the attribute and it matches the preferred
            # value exactly: that side sorts first.
            if ours is not None and theirs is None and ours == prefvalue:
                return -1
            if theirs is not None and ours is None and theirs == prefvalue:
                return 1

            # Attribute missing on one side (and no exact-match win above):
            # this preference cannot distinguish the entries.
            if ours is None or theirs is None:
                continue

            # Identical values: defer to the next preference.
            if ours == theirs:
                continue

            # Exact match of the preferred value sorts first.
            if ours == prefvalue:
                return -1
            if theirs == prefvalue:
                return 1

        # No preference distinguished the entries; preserve input order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0
2234
2233
def sortclonebundleentries(ui, entries):
    """Sort manifest entries by the user's ``ui.clonebundleprefers`` config.

    With no preferences configured, the manifest order is preserved.
    """
    prefers = ui.configlist('ui', 'clonebundleprefers')
    if not prefers:
        return list(entries)

    # Each preference is "KEY=VALUE"; split once so later '=' stay in VALUE.
    prefers = [p.split('=', 1) for p in prefers]

    wrapped = sorted(clonebundleentry(v, prefers) for v in entries)
    return [w.value for w in wrapped]
2244
2243
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL.

    Returns True on success; False (after warning) on HTTP/URL errors so the
    caller can decide whether to fall back to a regular clone.
    """
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            fh = urlmod.open(ui, url)
            cg = readbundle(ui, fh, 'stream')

            # Stream clone bundles apply directly; everything else goes
            # through the normal bundle2 application path.
            if isinstance(cg, streamclone.streamcloneapplier):
                cg.apply(repo)
            else:
                bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') %
                    util.forcebytestr(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') %
                    util.forcebytestr(e.reason))

        return False
General Comments 0
You need to be logged in to leave comments. Login now