##// END OF EJS Templates
revbranchcache: add the necessary bit to send 'rbc' data over bundle2...
Boris Feld -
r36984:c0e90df1 default
parent child Browse files
Show More
@@ -1,2272 +1,2294 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 bin,
16 bin,
17 hex,
17 hex,
18 nullid,
18 nullid,
19 )
19 )
20 from . import (
20 from . import (
21 bookmarks as bookmod,
21 bookmarks as bookmod,
22 bundle2,
22 bundle2,
23 changegroup,
23 changegroup,
24 discovery,
24 discovery,
25 error,
25 error,
26 lock as lockmod,
26 lock as lockmod,
27 logexchange,
27 logexchange,
28 obsolete,
28 obsolete,
29 phases,
29 phases,
30 pushkey,
30 pushkey,
31 pycompat,
31 pycompat,
32 scmutil,
32 scmutil,
33 sslutil,
33 sslutil,
34 streamclone,
34 streamclone,
35 url as urlmod,
35 url as urlmod,
36 util,
36 util,
37 )
37 )
38
38
39 urlerr = util.urlerr
39 urlerr = util.urlerr
40 urlreq = util.urlreq
40 urlreq = util.urlreq
41
41
42 # Maps bundle version human names to changegroup versions.
42 # Maps bundle version human names to changegroup versions.
43 _bundlespeccgversions = {'v1': '01',
43 _bundlespeccgversions = {'v1': '01',
44 'v2': '02',
44 'v2': '02',
45 'packed1': 's1',
45 'packed1': 's1',
46 'bundle2': '02', #legacy
46 'bundle2': '02', #legacy
47 }
47 }
48
48
49 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
49 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
50 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
50 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
51
51
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>[;key=value...]" into (version, {key: value}),
        # URI-decoding each key and value.
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
                _('invalid bundle specification; '
                  'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-specified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                    _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
                _('compression engine %s is not supported on v1 bundles') %
                compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                    _('missing support for repository features: %s') %
                      ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Translate the human-readable names into the internal
        # compression type and changegroup version identifiers.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
173
173
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the header of ``fh`` and return the matching bundle unpacker.

    ``fname`` is used in error messages (``"stream"`` when empty); when a
    ``vfs`` is supplied the name is joined against it for reporting.
    Raises ``error.Abort`` for non-Mercurial data or unknown versions.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A stream beginning with a NUL byte (and not "HG") is a raw,
        # headerless changegroup: replay the consumed bytes and treat it
        # as an uncompressed HG10 bundle.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = header[0:2]
    version = header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)

    if version == '10':
        if alg is None:
            # The compression marker follows the bundle1 magic.
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
201
201
def _formatrequirementsspec(requirements):
    """Return the requirements, sorted, comma-joined and URL-quoted."""
    joined = ','.join(sorted(requirements))
    return urlreq.quote(joined)
204
204
def _formatrequirementsparams(requirements):
    """Return a ``requirements=<spec>`` parameter string.

    The key is URL-quoted for symmetry with the quoted spec value.
    """
    spec = _formatrequirementsspec(requirements)
    return "%s%s" % (urlreq.quote("requirements="), spec)
209
209
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal compression type to its bundlespec name,
        # or None when the engine is not recognized.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # BZ bundle whose header was already consumed; report as BZ.
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                # NOTE(review): both changegroup '01' and '02' are mapped
                # to bundlespec 'v2' here -- confirm this is intended.
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        return 'none-packed1;%s' % _formatrequirementsparams(requirements)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
261
261
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    changelog = repo.changelog
    if not common:
        # Nothing known in common: everything since the null revision.
        common = [nullid]
    else:
        # Drop common nodes the local changelog does not actually have.
        known = changelog.hasnode
        common = [node for node in common if known(node)]
    return discovery.outgoing(repo, common, heads or changelog.heads())
280
280
def _forcebundle1(op):
    """return true if a pull/push must use bundle1

    This function is used to allow testing of the older bundle version"""
    # The goal of this config is to let developers choose the bundle
    # version used during exchange; especially handy during tests.
    # The value is a list of bundle versions to pick from; the highest
    # listed version should be used.
    #
    # developer config: devel.legacy.exchange
    versions = op.repo.ui.configlist('devel', 'legacy.exchange')
    wantbundle1 = 'bundle2' not in versions and 'bundle1' in versions
    if wantbundle1:
        return True
    # Also fall back to bundle1 when the remote cannot speak bundle2.
    return not op.remote.capable('bundle2')
295
295
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager (set lazily by push(); None until then)
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

    # mapping of message used when pushing bookmark
    # (action -> (success message, failure message), both taking the
    # bookmark name as the sole format argument)
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
422
422
423
423
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    if pushop.remote.local():
        # Local destination: make sure it supports every feature the
        # source repository requires before attempting anything.
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    # Any of the three context objects may be None (lock unavailable or
    # bundle1 forced); nullcontextmanager keeps the with-statement valid.
    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
488
488
# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}

def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""
    def dec(func):
        # a step name may only be registered once; wrapping goes through
        # pushdiscoverymapping instead
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return dec
512
512
def _pushdiscovery(pushop):
    """Run all discovery steps registered via the ``pushdiscovery`` decorator,
    in registration order, mutating ``pushop`` as each step sees fit."""
    for stepname in pushdiscoveryorder:
        step = pushdiscoverymapping[stepname]
        step(pushop)
518
518
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    fci = discovery.findcommonincoming
    if pushop.revs:
        # limit discovery to ancestors of the explicitly requested revs
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
                        ancestorsof=pushop.revs)
    else:
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    # record discovery results on the push operation for later steps
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
535
535
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(pushop.repo,
                                                     pushop.fallbackheads,
                                                     remotephases)
    droots = pushop.remotephases.draftroots

    extracond = ''
    if not pushop.remotephases.publishing:
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                       outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
587
587
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """discover the obsolescence markers relevant to the pushed heads

    Only runs when obsmarker exchange is enabled locally and the remote
    advertises the 'obsolete' pushkey namespace."""
    if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
        and pushop.repo.obsstore
        and 'obsolete' in pushop.remote.listkeys('namespaces')):
        repo = pushop.repo
        # very naive computation, that can be quite expensive on big repo.
        # However: evolution is currently slow on them anyway.
        nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
        pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
598
598
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and decide what to push"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # restrict bookmark moves to the pushed subset
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks the user explicitly asked to push (with name expansion)
    explicit = {repo._bookmarks.expandname(bookmark)
                for bookmark in pushop.bookmarks}

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # a side of the comparison may be absent (None); keep it as-is
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        return [(b, safehex(scid), safehex(dcid))
                for (b, scid, dcid) in bookmarks]

    comp = [hexifycompbookmarks(marks) for marks in comp]
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
628
628
629 def _processcompared(pushop, pushed, explicit, remotebms, comp):
629 def _processcompared(pushop, pushed, explicit, remotebms, comp):
630 """take decision on bookmark to pull from the remote bookmark
630 """take decision on bookmark to pull from the remote bookmark
631
631
632 Exist to help extensions who want to alter this behavior.
632 Exist to help extensions who want to alter this behavior.
633 """
633 """
634 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
634 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
635
635
636 repo = pushop.repo
636 repo = pushop.repo
637
637
638 for b, scid, dcid in advsrc:
638 for b, scid, dcid in advsrc:
639 if b in explicit:
639 if b in explicit:
640 explicit.remove(b)
640 explicit.remove(b)
641 if not pushed or repo[scid].rev() in pushed:
641 if not pushed or repo[scid].rev() in pushed:
642 pushop.outbookmarks.append((b, dcid, scid))
642 pushop.outbookmarks.append((b, dcid, scid))
643 # search added bookmark
643 # search added bookmark
644 for b, scid, dcid in addsrc:
644 for b, scid, dcid in addsrc:
645 if b in explicit:
645 if b in explicit:
646 explicit.remove(b)
646 explicit.remove(b)
647 pushop.outbookmarks.append((b, '', scid))
647 pushop.outbookmarks.append((b, '', scid))
648 # search for overwritten bookmark
648 # search for overwritten bookmark
649 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
649 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
650 if b in explicit:
650 if b in explicit:
651 explicit.remove(b)
651 explicit.remove(b)
652 pushop.outbookmarks.append((b, dcid, scid))
652 pushop.outbookmarks.append((b, dcid, scid))
653 # search for bookmark to delete
653 # search for bookmark to delete
654 for b, scid, dcid in adddst:
654 for b, scid, dcid in adddst:
655 if b in explicit:
655 if b in explicit:
656 explicit.remove(b)
656 explicit.remove(b)
657 # treat as "deleted locally"
657 # treat as "deleted locally"
658 pushop.outbookmarks.append((b, dcid, ''))
658 pushop.outbookmarks.append((b, dcid, ''))
659 # identical bookmarks shouldn't get reported
659 # identical bookmarks shouldn't get reported
660 for b, scid, dcid in same:
660 for b, scid, dcid in same:
661 if b in explicit:
661 if b in explicit:
662 explicit.remove(b)
662 explicit.remove(b)
663
663
664 if explicit:
664 if explicit:
665 explicit = sorted(explicit)
665 explicit = sorted(explicit)
666 # we should probably list all of them
666 # we should probably list all of them
667 pushop.ui.warn(_('bookmark %s does not exist on the local '
667 pushop.ui.warn(_('bookmark %s does not exist on the local '
668 'or remote repository!\n') % explicit[0])
668 'or remote repository!\n') % explicit[0])
669 pushop.bkresult = 2
669 pushop.bkresult = 2
670
670
671 pushop.outbookmarks.sort()
671 pushop.outbookmarks.sort()
672
672
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before pushing

    Returns False when there is nothing to push. Aborts when a non-forced
    push contains obsolete or unstable changesets."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
707
707
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""
    def dec(func):
        # a step name may only be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            # explicit position requested (e.g. to run before other steps)
            b2partsgenorder.insert(idx, stepname)
        return func
    return dec
734
734
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # legacy check: remote heads must be exactly what we saw
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # newer check: only heads we are actually affecting are verified
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
758
758
759 def _pushing(pushop):
759 def _pushing(pushop):
760 """return True if we are pushing anything"""
760 """return True if we are pushing anything"""
761 return bool(pushop.outgoing.missing
761 return bool(pushop.outgoing.missing
762 or pushop.outdatedphases
762 or pushop.outdatedphases
763 or pushop.outobsmarkers
763 or pushop.outobsmarkers
764 or pushop.outbookmarks)
764 or pushop.outbookmarks)
765
765
@b2partsgenerator('check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasbookmarkcheck = 'bookmarks' in b2caps
    if not (pushop.outbookmarks and hasbookmarkcheck):
        return
    # record the expected old value of each pushed bookmark so the server
    # can detect races
    data = []
    for book, old, new in pushop.outbookmarks:
        old = bin(old)
        data.append((book, old))
    checkdata = bookmod.binaryencode(data)
    bundler.newpart('check:bookmarks', data=checkdata)
781
781
@b2partsgenerator('check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = 'heads' in b2caps.get('phases', ())
    if pushop.remotephases is not None and hasphaseheads:
        # check that the remote phase has not changed
        checks = [[] for p in phases.allphases]
        checks[phases.public].extend(pushop.remotephases.publicheads)
        checks[phases.draft].extend(pushop.remotephases.draftroots)
        if any(checks):
            # sort for a stable, canonical encoding
            for nodes in checks:
                nodes.sort()
            checkdata = phases.binaryencode(checks)
            bundler.newpart('check:phases', data=checkdata)
799
799
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        # negotiate the highest changegroup version both sides support
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
839
839
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Dispatches to the binary 'phase-heads' part when supported (and not
    disabled via devel.legacy.exchange), falling back to pushkey parts."""
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
    haspushkey = 'pushkey' in b2caps
    hasphaseheads = 'heads' in b2caps.get('phases', ())

    if hasphaseheads and not legacyphase:
        return _pushb2phaseheads(pushop, bundler)
    elif haspushkey:
        return _pushb2phasespushkey(pushop, bundler)
856
856
def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part"""
    pushop.stepsdone.add('phases')
    if pushop.outdatedphases:
        # every outdated head moves to public (index 0 of allphases)
        updates = [[] for p in phases.allphases]
        updates[0].extend(h.node() for h in pushop.outdatedphases)
        phasedata = phases.binaryencode(updates)
        bundler.newpart('phase-heads', data=phasedata)
865
865
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part"""
    pushop.stepsdone.add('phases')
    part2node = []

    def handlefailure(pushop, exc):
        # map the failing part id back to the node it was moving
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        # one pushkey part per head moving from draft to public
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
900
900
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """push obsolescence markers through bundle2 when a common markers
    format version exists with the remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no markers format version in common; leave the step not-done so
        # another mechanism may still exchange markers
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        markers = sorted(pushop.outobsmarkers)
        bundle2.buildobsmarkerspart(bundler, markers)
912
912
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)

    legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
    legacybooks = 'bookmarks' in legacy

    # prefer the binary 'bookmarks' part, fall back to pushkey
    if not legacybooks and 'bookmarks' in b2caps:
        return _pushb2bookmarkspart(pushop, bundler)
    elif 'pushkey' in b2caps:
        return _pushb2bookmarkspushkey(pushop, bundler)
927
927
928 def _bmaction(old, new):
928 def _bmaction(old, new):
929 """small utility for bookmark pushing"""
929 """small utility for bookmark pushing"""
930 if not old:
930 if not old:
931 return 'export'
931 return 'export'
932 elif not new:
932 elif not new:
933 return 'delete'
933 return 'delete'
934 return 'update'
934 return 'update'
935
935
def _pushb2bookmarkspart(pushop, bundler):
    """push bookmarks through a dedicated binary 'bookmarks' bundle2 part"""
    pushop.stepsdone.add('bookmarks')
    if not pushop.outbookmarks:
        return

    # remember (bookmark, action) pairs for the reply handler, and gather
    # the (bookmark, node) payload for the binary part
    allactions = []
    data = []
    for book, old, new in pushop.outbookmarks:
        new = bin(new)
        data.append((book, new))
        allactions.append((book, _bmaction(old, new)))
    bundler.newpart('bookmarks', data=bookmod.binaryencode(data))

    def handlereply(op):
        ui = pushop.ui
        # reaching this point means the whole bundle was accepted, so every
        # bookmark update succeeded
        for book, action in allactions:
            ui.status(bookmsgmap[action][0] % book)

    return handlereply
957
957
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmarks through bundle2 using one 'pushkey' part per bookmark

    Legacy bundle2 path used when the remote lacks the dedicated 'bookmarks'
    part capability. Returns a reply handler that reports per-bookmark
    success/failure to the user.
    """
    pushop.stepsdone.add('bookmarks')
    # (partid, bookmark, action) triples, used to map server replies and
    # failures back to the bookmark they concern
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # called (via pushop.pkfailcb) when the server reports a pushkey
        # failure for one of our parts; abort with a bookmark-specific message
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # inspect the reply for each pushkey part and report the outcome
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            # at most one reply per part
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            # any attempted bookmark exchange marks the push result as done
            pushop.bkresult = 1
    return handlereply
1002
1002
@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2

    Parses the user-supplied ``KEY=VALUE`` strings from ``--pushvars`` and,
    when any are present, adds a ``pushvars`` part carrying them as advisory
    parameters. Registered with ``idx=0`` so the part comes first and server
    hooks can see the variables before other parts are processed.

    Raises ``error.Abort`` on a string lacking a ``=`` separator.
    '''
    pushvars = pushop.pushvars
    if pushvars:
        shellvars = {}
        for raw in pushvars:
            if '=' not in raw:
                # wrap in _() for translation, consistently with the other
                # user-facing messages in this module
                msg = _("unable to parse variable '%s', should follow "
                        "'KEY=VALUE' or 'KEY=' format")
                raise error.Abort(msg % raw)
            # split on the first '=' only, so values may contain '='
            k, v = raw.split('=', 1)
            shellvars[k] = v

        part = bundler.newpart('pushvars')

        # advisory: an older server simply ignores unknown parameters
        for key, value in shellvars.iteritems():
            part.addparam(key, value, mandatory=False)
1021
1021
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback: allow the server to send a bundle back (e.g. server-side
    # rewrites); requires a transaction manager to apply it into
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback,
                                                      role='client'))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # run every registered part generator, in the declared order; generators
    # may return a callable to be invoked on the server's reply
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    # (the 'replycaps' part added above always counts for one, hence <= 1)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            # the remote rejected a mandatory part/parameter it cannot handle
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            # process the server reply bundle (may apply pushed-back data)
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # the server aborted mid-bundle; relay its message (and hint)
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        # dispatch pushkey failures to the callback registered by the part
        # generator that created the failing part (see pushop.pkfailcb)
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    # give each part generator a chance to inspect the server reply
    for rephand in replyhandlers:
        rephand(op)
1071
1071
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) path: builds a version '01' changegroup and sends
    it through the remote's ``unbundle`` command. Stores the remote's result
    in ``pushop.cgresult``.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = ['force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
1111
1111
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Pulls the remote's phase roots, moves local changesets to the phases
    implied by the remote's answer, then pushes outdated draft heads to
    public on the remote through individual pushkey calls (fallback path
    when phases were not already exchanged over bundle2).
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # publishing server: everything common is public locally
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      ('%d' % phases.draft),
                                      ('%d' % phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1167
1167
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        # we hold a transaction: actually advance the phase boundary
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # repo is not locked, do not change any phases!
    # Only inform the user that phases should have been moved, when
    # applicable.
    actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
    if actualmoves:
        phasestr = phases.phasenames[phase]
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1184
1184
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    failed = False
    # reverse sort to ensure we end with dump0
    for key in sorted(remotedata, reverse=True):
        if not remote.pushkey('obsolete', key, '', remotedata[key]):
            failed = True
    if failed:
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
1203
1203
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Legacy (non-bundle2) path: one pushkey call per outgoing bookmark,
    reporting success or failure for each. Skipped entirely when the
    changeset push failed outright.
    """
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # derive the kind of change from the old/new values
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
1225
1225
class pulloperation(object):
    """A object that represent a single pull operation

    It purpose is to carry pull related state and very common operation.

    A new should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly (names expanded right away so later
        # steps work with canonical bookmark names)
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager (set by pull() before the steps run)
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # True unless configuration or remote capabilities force bundle1
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # bundle2 capabilities advertised by the remote peer
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1296
1296
class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # lazily created by transaction()
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            # expose the operation source and url to hooks
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def release(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
1326
1326
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           **pycompat.strkwargs(opargs))

    # refuse to pull from a local repo whose requirements we do not support
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
    # the step order below matters: clone bundles and legacy stream clones
    # must run before discovery, and bundle2 before the legacy fallbacks
    with repo.wlock(), repo.lock(), pullop.trmanager:
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        streamclone.maybeperformlegacystreamclone(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)

        # storing remotenames
        if repo.ui.configbool('experimental', 'remotenames'):
            logexchange.pullremotenames(repo, remote)

    return pullop
1383
1383
# list of steps to perform discovery before pull
# (populated by the pulldiscovery() decorator, in registration order)
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
1391
1391
def pulldiscovery(stepname):
    """Register a function as a discovery step performed before pull.

    The returned decorator records the function in the step-name ->
    function mapping and appends the step name to the ordered list of
    steps. Decorated functions are added in registration order (this may
    matter).

    Only use this decorator for a brand new step; to wrap a step defined
    by an extension, modify the pulldiscovery dictionary directly.
    """
    def register(func):
        # each step name may only be claimed once
        assert stepname not in pulldiscoverymapping
        pulldiscoveryorder.append(stepname)
        pulldiscoverymapping[stepname] = func
        return func
    return register
1407
1407
def _pulldiscovery(pullop):
    """Run every registered discovery step, in registration order."""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1413
1413
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """Fetch bookmark data for the bundle1 case.

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions.
    """
    if pullop.remotebookmarks is not None:
        # bookmark data already known, nothing to fetch
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    raw = pullop.remote.listkeys('bookmarks')
    pullop.remotebookmarks = bookmod.unhexlifybookmarks(raw)
1428
1428
1429
1429
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    # nodemap of the *unfiltered* repo: membership tells us whether a node
    # exists locally at all, even when it is hidden/filtered.
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote heads is filtered locally, put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
        # once all remote heads are known locally there is nothing to fetch
        if set(rheads).issubset(set(common)):
            fetch = []
    # publish the discovery results on the pull operation for later steps
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1462
1462
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads

    if streaming:
        # a stream bundle replaces the changegroup (and phase) exchange
        kwargs['cg'] = False
        kwargs['stream'] = True
        pullop.stepsdone.add('changegroup')
        pullop.stepsdone.add('phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add('changegroup')

        kwargs['cg'] = pullop.fetch

        # prefer the binary 'phase-heads' part unless legacy exchange is
        # explicitly requested or the server cannot emit it
        legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
        hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
        if (not legacyphase and hasbinaryphase):
            kwargs['phases'] = True
            pullop.stepsdone.add('phases')

        if 'listkeys' in pullop.remotebundle2caps:
            if 'phases' not in pullop.stepsdone:
                # fall back to pushkey-based phase exchange
                kwargs['listkeys'] = ['phases']

    bookmarksrequested = False
    legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
    hasbinarybook = 'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        pullop.stepsdone.add('request-bookmarks')

    if ('request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark and hasbinarybook):
        # request the binary bookmarks part
        kwargs['bookmarks'] = True
        bookmarksrequested = True

    if 'listkeys' in pullop.remotebundle2caps:
        if 'request-bookmarks' not in pullop.stepsdone:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add('request-bookmarks')
            kwargs.setdefault('listkeys', []).append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    # let extensions tweak the getbundle arguments before the request is sent
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
    try:
        op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction)
        # record incoming bookmark data instead of applying it directly
        op.modes['bookmarks'] = 'records'
        bundle2.processbundle(pullop.repo, bundle, op=op)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        books = {}
        for record in op.records['bookmarks']:
            books[record['bookmark']] = record["node"]
        pullop.remotebookmarks = books
    else:
        for namespace, value in op.records['listkeys']:
            if namespace == 'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1578
1578
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call

    Called with the pull operation and the getbundle keyword arguments just
    before the request is issued; intentionally a no-op by default.
    """
1581
1581
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        # already handled (e.g. by the bundle2 code path)
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # pick the most capable wire protocol command available on the remote
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1616
1616
def _pullphase(pullop):
    """Fetch phase data from the remote via pushkey and apply it locally."""
    if 'phases' in pullop.stepsdone:
        # phases already exchanged (e.g. through bundle2)
        return
    _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1623
1623
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    # bind hot lookups to locals for the list comprehensions below
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1658
1658
def _pullbookmarks(pullop):
    """Update local bookmarks from the remote bookmark information."""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    localrepo = pullop.repo
    bookmod.updatefromremote(localrepo.ui,
                             localrepo,
                             pullop.remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1670
1670
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        # 'dump0' is the first pushkey chunk of marker data; its presence
        # tells us the remote actually has markers to transfer
        if 'dump0' in remoteobs:
            # open the transaction only once we know there is data to apply
            tr = pullop.gettransaction()
            markers = []
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr
1698
1698
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    blob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    return {'HG20', 'bundle2=' + urlreq.quote(blob)}
1705
1705
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
1713
1713
def getbundle2partsgenerator(stepname, idx=None):
    """Register a function generating a bundle2 part for getbundle.

    The returned decorator records the function in the step-name ->
    function mapping and places the step in the ordered step list:
    appended at the end by default, or inserted at position *idx* when
    given. Registration order may matter.

    Only use this decorator for new steps; to wrap a step defined by an
    extension, modify the getbundle2partsmapping dictionary directly.
    """
    def register(func):
        # each step name may only be claimed once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is not None:
            getbundle2partsorder.insert(idx, stepname)
        else:
            getbundle2partsorder.append(stepname)
        return func
    return register
1732
1732
def bundle2requested(bundlecaps):
    """Tell whether the client capabilities request a bundle2 stream."""
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1737
1737
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        # bundle10 knows only about changegroups; any extra argument is an
        # error rather than something to silently drop
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        info['bundleversion'] = 1
        return info, changegroup.makestream(repo, outgoing, '01', source,
                                            bundlecaps=bundlecaps)

    # bundle20 case
    info['bundleversion'] = 2
    # decode the client's advertised bundle2 capabilities from bundlecaps
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # each registered part generator may add parts to the bundler
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    info['prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()
1784
1784
@getbundle2partsgenerator('stream2')
def _getbundlestream2(bundler, repo, source, bundlecaps=None,
                      b2caps=None, heads=None, common=None, **kwargs):
    """Add a 'stream2' part carrying raw store data to the bundle.

    Only emitted when the client explicitly asked for stream data and the
    server configuration allows generating it.
    """
    if not kwargs.get('stream', False):
        return

    if not streamclone.allowservergeneration(repo):
        raise error.Abort(_('stream data requested but server does not allow '
                            'this feature'),
                          hint=_('well-behaved clients should not be '
                                 'requesting stream data from servers not '
                                 'advertising it; the client may be buggy'))

    # Stream clones don't compress well. And compression undermines a
    # goal of stream clones, which is to be fast. Communicate the desire
    # to avoid compression to consumers of the bundle.
    bundler.prefercompressed = False

    filecount, bytecount, it = streamclone.generatev2(repo)
    requirements = _formatrequirementsspec(repo.requirements)
    part = bundler.newpart('stream2', data=it)
    # counts are mandatory so the client can track progress and verify
    part.addparam('bytecount', '%d' % bytecount, mandatory=True)
    part.addparam('filecount', '%d' % filecount, mandatory=True)
    part.addparam('requirements', requirements, mandatory=True)
1809
1809
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    if not kwargs.get(r'cg', True):
        return

    # Pick the newest changegroup version understood by both ends.
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        supported = changegroup.supportedoutgoingversions(repo)
        cgversions = [v for v in cgversions if v in supported]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)

    outgoing = _computeoutgoing(repo, heads, common)
    if not outgoing.missing:
        # Nothing to transfer; emit no part.
        return

    cgstream = changegroup.makestream(repo, outgoing, version, source,
                                      bundlecaps=bundlecaps)
    part = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        part.addparam('version', version)
    part.addparam('nbchanges', '%d' % len(outgoing.missing),
                  mandatory=False)
    if 'treemanifest' in repo.requirements:
        part.addparam('treemanifest', '1')
1838
1838
@getbundle2partsgenerator('bookmarks')
def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
                           b2caps=None, **kwargs):
    """add a bookmark part to the requested bundle"""
    if not kwargs.get(r'bookmarks', False):
        return
    if 'bookmarks' not in b2caps:
        raise ValueError(_('no common bookmarks exchange method'))
    encoded = bookmod.binaryencode(bookmod.listbinbookmarks(repo))
    # An empty payload means there are no bookmarks; skip the part.
    if encoded:
        bundler.newpart('bookmarks', data=encoded)
1851
1851
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    # One 'listkeys' part is emitted per requested namespace.
    for namespace in kwargs.get(r'listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1862
1862
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get(r'obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # Only markers relevant to the ancestors of the exchanged heads are sent.
    subset = [c.node() for c in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1874
1874
@getbundle2partsgenerator('phases')
def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, **kwargs):
    """add phase heads part to the requested bundle"""
    if kwargs.get(r'phases', False):
        # Default to an empty tuple so a client that lacks the 'phases'
        # capability entirely triggers the intended ValueError below
        # instead of a TypeError from "'heads' in None".
        if 'heads' not in b2caps.get('phases', ()):
            raise ValueError(_('no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            # On a publishing server everything exchanged becomes public.
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phase for now)
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = 'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        phasemapping = []
        for phase in phases.allphases:
            phasemapping.append(sorted(headsbyphase[phase]))

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart('phase-heads', data=phasedata)
1921
1921
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Send only when changesets are being exchanged and the client
    # advertises support for the part.
    if not kwargs.get(r'cg', True):
        return
    if 'hgtagsfnodes' not in b2caps:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1941
1941
@getbundle2partsgenerator('cache:rev-branch-cache')
def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
                             b2caps=None, heads=None, common=None,
                             **kwargs):
    """Transfer the rev-branch-cache mapping

    The payload is a series of data related to each branch

    1) branch name length
    2) number of open heads
    3) number of closed heads
    4) open heads nodes
    5) closed heads nodes
    """
    # Send only when changesets are being exchanged and the client
    # advertises support for the 'rev-branch-cache' part.
    if not kwargs.get(r'cg', True) or 'rev-branch-cache' not in b2caps:
        return
    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addpartrevbranchcache(repo, bundler, outgoing)
1963
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    currentheads = repo.heads()
    headsdigest = hashlib.sha1(''.join(sorted(currentheads))).digest()
    # The remote may send the literal heads, a hash of them, or 'force'
    # to bypass the race check entirely.
    uptodate = (their_heads == ['force']
                or their_heads == currentheads
                or their_heads == ['hashed', headsdigest])
    if not uptodate:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1955
1977
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call wil be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                def gettransaction():
                    # Lazily take the locks and open the transaction only
                    # once a bundle part actually needs to write to the
                    # repository; results are cached in 'lockandtr'.
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    # Preserve the reply bundle even if processing raised,
                    # so any output captured below can be sent back.
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # Tag the exception so callers know it happened during
                # bundle2 processing, and salvage any output parts.
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        # Release transaction then locks, in reverse acquisition order.
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
2028
2050
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible.

    Fetches the remote's clone bundles manifest, filters and sorts the
    entries against local capabilities and preferences, and attempts to
    apply the best candidate. No-op unless this is a fresh clone.
    """

    repo = pullop.repo
    remote = pullop.remote

    # Feature can be disabled entirely via config.
    if not repo.ui.configbool('ui', 'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    # A pull of specific heads is not a plain clone; skip.
    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    res = remote._call('clonebundles')

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(
        repo, entries, streamclonerequested=pullop.streamclonerequested)

    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    # Only the single best entry is attempted; on failure we abort or
    # fall back rather than trying the next candidate.
    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
2094
2116
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(repo, value,
                                                            externalnames=True)
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version
                except (error.InvalidBundleSpecification,
                        error.UnsupportedBundleSpecification):
                    # Malformed specs are tolerated; the entry just
                    # won't expose the derived attributes.
                    pass

        entries.append(attrs)

    return entries
2130
2152
def filterclonebundleentries(repo, entries, streamclonerequested=False):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    newentries = []
    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                comp, version, params = parsebundlespec(repo, spec, strict=True)

                # If a stream clone was requested, filter out non-streamclone
                # entries.
                if streamclonerequested and (comp != 'UN' or version != 's1'):
                    repo.ui.debug('filtering %s because not a stream clone\n' %
                                  entry['URL'])
                    continue

            except error.InvalidBundleSpecification as e:
                # Use forcebytestr for consistency with the handler below;
                # str() can misbehave on bytes-bearing messages under py3.
                repo.ui.debug(util.forcebytestr(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (
                                  entry['URL'], util.forcebytestr(e)))
                continue
        # If we don't have a spec and requested a stream clone, we don't know
        # what the entry is so don't attempt to apply it.
        elif streamclonerequested:
            repo.ui.debug('filtering %s because cannot determine if a stream '
                          'clone bundle\n' % entry['URL'])
            continue

        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        newentries.append(entry)

    return newentries
2178
2200
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk the preference list in order; the first attribute that
        # distinguishes the two entries decides the ordering. Negative
        # means 'self' sorts first.
        for prefkey, prefvalue in self.prefers:
            ours = self.value.get(prefkey)
            theirs = other.value.get(prefkey)

            # Special case for b missing attribute and a matches exactly.
            if theirs is None and ours is not None and ours == prefvalue:
                return -1

            # Special case for a missing attribute and b matches exactly.
            if ours is None and theirs is not None and theirs == prefvalue:
                return 1

            # We can't compare unless attribute present on both.
            if ours is None or theirs is None:
                continue

            # Same values should fall back to next attribute.
            if ours == theirs:
                continue

            # Exact matches come first.
            if ours == prefvalue:
                return -1
            if theirs == prefvalue:
                return 1

        # If we got here we couldn't sort by attributes and prefers. Fall
        # back to index order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0
2242
2264
def sortclonebundleentries(ui, entries):
    """Order clone bundle entries per ``ui.clonebundleprefers``."""
    rawprefers = ui.configlist('ui', 'clonebundleprefers')
    # Without preferences, preserve the manifest order (as a copy).
    if not rawprefers:
        return list(entries)

    prefers = [keyval.split('=', 1) for keyval in rawprefers]
    ranked = sorted(clonebundleentry(entry, prefers) for entry in entries)
    return [wrapper.value for wrapper in ranked]
2252
2274
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL.

    Returns True if the bundle was fetched and applied; False if fetching
    failed with an HTTP or URL error (application errors propagate).
    """
    with repo.lock(), repo.transaction('bundleurl') as tr:
        fh = None
        try:
            try:
                fh = urlmod.open(ui, url)
                cg = readbundle(ui, fh, 'stream')

                if isinstance(cg, streamclone.streamcloneapplier):
                    cg.apply(repo)
                else:
                    bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
                return True
            except urlerr.httperror as e:
                ui.warn(_('HTTP error fetching bundle: %s\n') %
                        util.forcebytestr(e))
            except urlerr.urlerror as e:
                ui.warn(_('error fetching bundle: %s\n') %
                        util.forcebytestr(e.reason))
        finally:
            # Close the URL response to avoid leaking the underlying
            # connection (it was previously left open).
            if fh is not None:
                fh.close()

    return False
General Comments 0
You need to be logged in to leave comments. Login now