##// END OF EJS Templates
push-discovery: extract the bookmark comparison logic in its own function...
Boris Feld -
r36956:8fd9b56e default
parent child Browse files
Show More
@@ -1,2263 +1,2272 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 bin,
16 bin,
17 hex,
17 hex,
18 nullid,
18 nullid,
19 )
19 )
20 from . import (
20 from . import (
21 bookmarks as bookmod,
21 bookmarks as bookmod,
22 bundle2,
22 bundle2,
23 changegroup,
23 changegroup,
24 discovery,
24 discovery,
25 error,
25 error,
26 lock as lockmod,
26 lock as lockmod,
27 logexchange,
27 logexchange,
28 obsolete,
28 obsolete,
29 phases,
29 phases,
30 pushkey,
30 pushkey,
31 pycompat,
31 pycompat,
32 scmutil,
32 scmutil,
33 sslutil,
33 sslutil,
34 streamclone,
34 streamclone,
35 url as urlmod,
35 url as urlmod,
36 util,
36 util,
37 )
37 )
38
38
39 urlerr = util.urlerr
39 urlerr = util.urlerr
40 urlreq = util.urlreq
40 urlreq = util.urlreq
41
41
42 # Maps bundle version human names to changegroup versions.
42 # Maps bundle version human names to changegroup versions.
43 _bundlespeccgversions = {'v1': '01',
43 _bundlespeccgversions = {'v1': '01',
44 'v2': '02',
44 'v2': '02',
45 'packed1': 's1',
45 'packed1': 's1',
46 'bundle2': '02', #legacy
46 'bundle2': '02', #legacy
47 }
47 }
48
48
49 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
49 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
50 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
50 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
51
51
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>[;key=value...]" into the version and a dict of
        # URI-decoded parameters. Raises InvalidBundleSpecification on a
        # parameter that is missing "=".
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params


    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
                _('invalid bundle specification; '
                  'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-specified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                    _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            # Only the compression was given; infer the version.
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            # Only the version was given; infer the compression.
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                    _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Convert human-centric names to the internal identifiers used by
        # the bundle machinery (e.g. 'v2' -> '02').
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the header of a bundle file object and return an unpacker.

    ``fname`` may be empty (a raw stream) or a path, optionally joined
    against ``vfs``. Returns a cg1unpacker, a bundle2 unbundler or a
    streamclone applier depending on the detected format; aborts on
    anything unrecognized.
    """
    hdr = changegroup.readexactly(fh, 4)

    compalg = None
    if not fname:
        fname = "stream"
        if not hdr.startswith('HG') and hdr.startswith('\0'):
            # Headerless changegroup stream: push the consumed bytes back
            # and treat it as an uncompressed HG10 bundle.
            fh = changegroup.headerlessfixup(fh, hdr)
            hdr = "HG10"
            compalg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = hdr[0:2]
    version = hdr[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)

    if version == '10':
        if compalg is None:
            # HG10 carries a two-byte compression indicator after the magic.
            compalg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, compalg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)

    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
def _formatrequirementsspec(requirements):
    """Return ``requirements`` sorted, comma-joined and URL-quoted."""
    joined = ','.join(sorted(requirements))
    return urlreq.quote(joined)
def _formatrequirementsparams(requirements):
    """Return a URL-quoted ``requirements=<spec>`` parameter string."""
    quotedkey = urlreq.quote("requirements=")
    return "%s%s" % (quotedkey, _formatrequirementsspec(requirements))
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal compression identifier to its bundlespec name;
        # returns None for unknown algorithms.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # Internal marker for a BZ bundle whose header was consumed.
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        # Scan the parts for a changegroup to determine the version; only
        # cg versions '01'/'02' map to a known bundlespec ('v2').
        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        # Stream clone bundles embed the repository requirements in the
        # bundlespec parameters.
        requirements = streamclone.readbundle1header(fh)[2]
        return 'none-packed1;%s' % _formatrequirementsparams(requirements)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
def _computeoutgoing(repo, heads, common):
    """Build a discovery.outgoing object from ``common`` and ``heads``.

    Kept as a standalone function so extensions can reuse or wrap the
    logic. Unknown nodes are dropped from ``common``; an empty ``common``
    falls back to the null revision, and empty ``heads`` defaults to all
    changelog heads.
    """
    changelog = repo.changelog
    if not common:
        common = [nullid]
    else:
        known = changelog.hasnode
        common = [node for node in common if known(node)]
    return discovery.outgoing(repo, common, heads or changelog.heads())
def _forcebundle1(op):
    """return true if a pull/push must use bundle1

    This function is used to allow testing of the older bundle version"""
    # The goal is this config is to allow developer to choose the bundle
    # version used during exchanged. This is especially handy during test.
    # Value is a list of bundle version to be picked from, highest version
    # should be used.
    #
    # developer config: devel.legacy.exchange
    legacy = op.repo.ui.configlist('devel', 'legacy.exchange')
    wantbundle1 = 'bundle2' not in legacy and 'bundle1' in legacy
    return wantbundle1 or not op.remote.capable('bundle2')
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

# mapping of message used when pushing bookmark
# (each value is a (success message, failure message) pair, taking the
# bookmark name as its sole format argument)
bookmsgmap = {'update': (_("updating bookmark %s\n"),
                         _('updating bookmark %s failed!\n')),
              'export': (_("exporting bookmark %s\n"),
                         _('exporting bookmark %s failed!\n')),
              'delete': (_("deleting remote bookmark %s\n"),
                         _('deleting remote bookmark %s failed!\n')),
              }
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    if pushop.remote.local():
        # Local peer: make sure the destination repository supports every
        # feature the source requires before doing any work.
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    # Any lock we failed to take above stays None; nullcontextmanager keeps
    # the with-statement valid in that degraded mode.
    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    return pushop
489 # list of steps to perform discovery before push
489 # list of steps to perform discovery before push
490 pushdiscoveryorder = []
490 pushdiscoveryorder = []
491
491
492 # Mapping between step name and function
492 # Mapping between step name and function
493 #
493 #
494 # This exists to help extensions wrap steps if necessary
494 # This exists to help extensions wrap steps if necessary
495 pushdiscoverymapping = {}
495 pushdiscoverymapping = {}
496
496
497 def pushdiscovery(stepname):
497 def pushdiscovery(stepname):
498 """decorator for function performing discovery before push
498 """decorator for function performing discovery before push
499
499
500 The function is added to the step -> function mapping and appended to the
500 The function is added to the step -> function mapping and appended to the
501 list of steps. Beware that decorated function will be added in order (this
501 list of steps. Beware that decorated function will be added in order (this
502 may matter).
502 may matter).
503
503
504 You can only use this decorator for a new step, if you want to wrap a step
504 You can only use this decorator for a new step, if you want to wrap a step
505 from an extension, change the pushdiscovery dictionary directly."""
505 from an extension, change the pushdiscovery dictionary directly."""
506 def dec(func):
506 def dec(func):
507 assert stepname not in pushdiscoverymapping
507 assert stepname not in pushdiscoverymapping
508 pushdiscoverymapping[stepname] = func
508 pushdiscoverymapping[stepname] = func
509 pushdiscoveryorder.append(stepname)
509 pushdiscoveryorder.append(stepname)
510 return func
510 return func
511 return dec
511 return dec
512
512
513 def _pushdiscovery(pushop):
513 def _pushdiscovery(pushop):
514 """Run all discovery steps"""
514 """Run all discovery steps"""
515 for stepname in pushdiscoveryorder:
515 for stepname in pushdiscoveryorder:
516 step = pushdiscoverymapping[stepname]
516 step = pushdiscoverymapping[stepname]
517 step(pushop)
517 step(pushop)
518
518
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    fci = discovery.findcommonincoming
    if pushop.revs:
        # restrict discovery to the ancestors of the requested revisions
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
                        ancestorsof=pushop.revs)
    else:
        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
    common, inc, remoteheads = commoninc
    fco = discovery.findcommonoutgoing
    outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
                   commoninc=commoninc, force=pushop.force)
    # record the discovery results on the push operation for later steps
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
534 pushop.incoming = inc
535
535
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    # summarize the remote phase situation for our fallback heads
    pushop.remotephases = phases.remotephasessummary(pushop.repo,
                                                     pushop.fallbackheads,
                                                     remotephases)
    droots = pushop.remotephases.draftroots

    extracond = ''
    if not pushop.remotephases.publishing:
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                       outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    # heads to turn public on success / on changeset-push failure
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
586 pushop.fallbackoutdatedphases = fallback
587
587
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """collect the obsolescence markers relevant to the pushed heads"""
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    if not repo.obsstore:
        return
    if 'obsolete' not in pushop.remote.listkeys('namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (ctx.node() for ctx in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
597 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
598
598
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """discover the bookmark updates that need to be pushed

    Compare local and remote bookmarks, convert the comparison result to
    hex node ids and delegate the decision making to ``_processcompared``.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # ancestors of the revisions being pushed (used to restrict which
        # bookmark moves are sent along)
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmark names explicitly requested on the command line, expanded
    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # hex() a node id, letting None (missing bookmark) pass through
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        return [(b, safehex(scid), safehex(dcid))
                for (b, scid, dcid) in bookmarks]

    comp = [hexifycompbookmarks(marks) for marks in comp]
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
628
def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """take decision on bookmarks to push to the remote repository

    ``pushed`` holds the revisions whose bookmark moves may be pushed (an
    empty value means "no restriction"), ``explicit`` the bookmark names
    explicitly requested on the command line, ``remotebms`` the remote
    bookmarks and ``comp`` the output of ``bookmod.comparebookmarks``.
    Selected moves are appended to ``pushop.outbookmarks`` as
    (name, old-remote-id, new-remote-id) triples.

    Exist to help extensions who want to alter this behavior.
    """
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo

    # bookmarks that moved forward locally: push the move when the new
    # position is part of what we are pushing (or there is no restriction)
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not pushed or repo[scid].rev() in pushed:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    # (only overwritten when explicitly requested)
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    # (only deleted when explicitly requested)
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
            # treat as "deleted locally"
            pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(_('bookmark %s does not exist on the local '
                         'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
663
672
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before push

    Return False when there is nothing to push. Abort when the push would
    propagate obsolete or unstable changesets (unless forced). Also runs
    the head checks via ``discovery.checkheads``.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
698
707
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}

def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    When ``idx`` is given the step is inserted at that position in the order
    instead of being appended.

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, change the b2partsgenmapping dictionary directly."""
    def dec(func):
        # each step name may only be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return dec
725
734
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # legacy check: the remote heads must be exactly what we saw
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # finer check: only heads our push actually affects must be
            # unchanged on the remote
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
749
758
750 def _pushing(pushop):
759 def _pushing(pushop):
751 """return True if we are pushing anything"""
760 """return True if we are pushing anything"""
752 return bool(pushop.outgoing.missing
761 return bool(pushop.outgoing.missing
753 or pushop.outdatedphases
762 or pushop.outdatedphases
754 or pushop.outobsmarkers
763 or pushop.outobsmarkers
755 or pushop.outbookmarks)
764 or pushop.outbookmarks)
756
765
@b2partsgenerator('check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasbookmarkcheck = 'bookmarks' in b2caps
    if not (pushop.outbookmarks and hasbookmarkcheck):
        return
    # ask the server to verify each bookmark is still at the value we saw
    data = []
    for book, old, new in pushop.outbookmarks:
        old = bin(old)
        data.append((book, old))
    checkdata = bookmod.binaryencode(data)
    bundler.newpart('check:bookmarks', data=checkdata)
772
781
@b2partsgenerator('check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = 'heads' in b2caps.get('phases', ())
    if pushop.remotephases is not None and hasphaseheads:
        # check that the remote phase has not changed
        checks = [[] for p in phases.allphases]
        checks[phases.public].extend(pushop.remotephases.publicheads)
        checks[phases.draft].extend(pushop.remotephases.draftroots)
        if any(checks):
            for nodes in checks:
                # sort for a deterministic part payload
                nodes.sort()
            checkdata = phases.binaryencode(checks)
            bundler.newpart('check:phases', data=checkdata)
790
799
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        # keep only versions both sides can produce/consume
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
838 return handlereply
830
839
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Dispatch to the binary 'phase-heads' part when the remote supports it
    and legacy exchange is not requested, otherwise fall back to pushkey.
    """
    if 'phases' in pushop.stepsdone:
        return
    remotecaps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    uselegacy = 'phases' in ui.configlist('devel', 'legacy.exchange')
    supportspushkey = 'pushkey' in remotecaps
    supportsheads = 'heads' in remotecaps.get('phases', ())

    if supportsheads and not uselegacy:
        return _pushb2phaseheads(pushop, bundler)
    if supportspushkey:
        return _pushb2phasespushkey(pushop, bundler)
847
856
def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part"""
    pushop.stepsdone.add('phases')
    if not pushop.outdatedphases:
        return
    # one bucket per phase; all outdated heads become public (index 0)
    updates = [[] for _phase in phases.allphases]
    updates[0].extend(head.node() for head in pushop.outdatedphases)
    phasedata = phases.binaryencode(updates)
    bundler.newpart('phase-heads', data=phasedata)
856
865
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part"""
    pushop.stepsdone.add('phases')
    # remember which part id carries which head, for failure/reply handling
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        # one pushkey part per head to move from draft to public
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # inspect the server reply for each pushkey part and warn on failure
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
891
900
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence-markers part to the outgoing bundle2"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    # nothing to do when no marker version is shared with the remote
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
903
912
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Prefer the dedicated 'bookmarks' part unless legacy exchange is
    configured; otherwise fall back to pushkey based exchange.
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    remotecaps = bundle2.bundle2caps(pushop.remote)

    legacyexchange = pushop.repo.ui.configlist('devel', 'legacy.exchange')
    uselegacy = 'bookmarks' in legacyexchange

    if 'bookmarks' in remotecaps and not uselegacy:
        return _pushb2bookmarkspart(pushop, bundler)
    if 'pushkey' in remotecaps:
        return _pushb2bookmarkspushkey(pushop, bundler)
918
927
919 def _bmaction(old, new):
928 def _bmaction(old, new):
920 """small utility for bookmark pushing"""
929 """small utility for bookmark pushing"""
921 if not old:
930 if not old:
922 return 'export'
931 return 'export'
923 elif not new:
932 elif not new:
924 return 'delete'
933 return 'delete'
925 return 'update'
934 return 'update'
926
935
def _pushb2bookmarkspart(pushop, bundler):
    """add a binary 'bookmarks' part carrying all outgoing bookmark moves"""
    pushop.stepsdone.add('bookmarks')
    if not pushop.outbookmarks:
        return

    data = []
    allactions = []
    for book, old, new in pushop.outbookmarks:
        newnode = bin(new)
        data.append((book, newnode))
        allactions.append((book, _bmaction(old, newnode)))
    bundler.newpart('bookmarks', data=bookmod.binaryencode(data))

    def handlereply(op):
        # reaching this point means the part was accepted: report success
        # for every recorded action
        ui = pushop.ui
        for book, action in allactions:
            ui.status(bookmsgmap[action][0] % book)

    return handlereply
948
957
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmarks through bundle2 using one pushkey part per bookmark

    Registers a failure callback per part so a rejected bookmark aborts
    with a message naming the bookmark, and returns a reply handler that
    reports per-bookmark success or failure.
    """
    pushop.stepsdone.add('bookmarks')
    # (partid, bookmark name, action) for each generated pushkey part
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            # a pushkey part gets at most one reply
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply
993
1002
@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    if not pushop.pushvars:
        return

    # parse "KEY=VALUE" / "KEY=" strings into a mapping, rejecting
    # anything without an '=' separator
    shellvars = {}
    for raw in pushop.pushvars:
        if '=' not in raw:
            msg = ("unable to parse variable '%s', should follow "
                   "'KEY=VALUE' or 'KEY=' format")
            raise error.Abort(msg % raw)
        key, value = raw.split('=', 1)
        shellvars[key] = value

    part = bundler.newpart('pushvars')
    for name, val in shellvars.iteritems():
        part.addparam(name, val, mandatory=False)
1012
1021
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback: allow the server to send data back in its reply (requires
    # an open transaction on our side)
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback,
                                                      role='client'))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # run every registered part generator; generators that care about the
    # server reply return a callable handler
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push (only the replycaps part was added)
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
    except bundle2.AbortFromPart as exc:
        # the server aborted while processing one of our parts
        pushop.ui.status(_('remote: %s\n') % exc)
        if exc.hint is not None:
            pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
        raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        # delegate to the failure callback registered for that part, if any
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
1062
1071
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = ['force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
1102
1111
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # remote publishes everything: common heads become public locally
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      ('%d' % phases.draft),
                                      ('%d' % phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1158
1167
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    trmanager = pushop.trmanager
    if trmanager:
        phases.advanceboundary(pushop.repo, trmanager.transaction(),
                               phase, nodes)
        return
    # repo is not locked, do not change any phases!
    # Inform the user that phases should have been moved when applicable.
    if any(phase < pushop.repo[n].phase() for n in nodes):
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n')
                         % phases.phasenames[phase])
1175
1184
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    # reverse sort to ensure we end with dump0
    rslts = [remote.pushkey('obsolete', key, '', remotedata[key])
             for key in sorted(remotedata, reverse=True)]
    if not all(rslts):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
1194
1203
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value form invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
1216
1225
class pulloperation(object):
    """A object that represent a single pull operation

    It purpose is to carry pull related state and very common operation.

    A new should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly (names are expanded before being stored)
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        """True unless bundle1 usage is being forced by configuration"""
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        """bundle2 capabilities advertised by the remote peer"""
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1287
1296
class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""
    def __init__(self, repo, source, url):
        self.repo = repo
        # operation name used in the transaction name and hook arguments
        self.source = source
        self.url = url
        # lazily-created transaction; None until transaction() is called
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            # passwords are stripped from the URL before it reaches logs/hooks
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            self._tr = self.repo.transaction(trname)
            self._tr.hookargs['source'] = self.source
            self._tr.hookargs['url'] = self.url
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
1317
1326
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           **pycompat.strkwargs(opargs))

    # refuse to pull from a repo whose requirements we do not support
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
    with repo.wlock(), repo.lock(), pullop.trmanager:
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        streamclone.maybeperformlegacystreamclone(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # each step checks pullop.stepsdone, so these are no-ops for
        # anything bundle2 already covered
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)

    # storing remotenames
    if repo.ui.configbool('experimental', 'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pullop
1374
1383
# list of steps to perform discovery before pull (step names, in run order)
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
1382
1391
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    Registers the decorated function under ``stepname`` in the step ->
    function mapping and appends the step to the ordered list. Beware that
    decorated functions are added in decoration order (this may matter).

    You can only use this decorator for a new step, if you want to wrap a
    step from an extension, change the pulldiscovery dictionary directly.
    """
    def register(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func

    return register
1398
1407
def _pulldiscovery(pullop):
    """Run every registered discovery step, in registration order."""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1404
1413
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # bookmark data already known, nothing to fetch
        return
    if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    raw = pullop.remote.listkeys('bookmarks')
    pullop.remotebookmarks = bookmod.unhexlifybookmarks(raw)
1419
1428
1420
1429
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point.

    Stores the results on the pull operation as ``pullop.common``,
    ``pullop.fetch`` and ``pullop.rheads``."""
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    # discovery ran on the filtered repo; use the unfiltered nodemap to
    # recognize remote heads that exist locally but are hidden
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote heads is filtered locally, put in back in common.
        #
        # This is a hackish solution to catch most of "common but locally
        # hidden situation". We do not performs discovery on unfiltered
        # repository because it end up doing a pathological amount of round
        # trip for w huge amount of changeset we do not care about.
        #
        # If a set of such "common but filtered" changeset exist on the server
        # but are not including a remote heads, we'll not be able to detect it,
        scommon = set(common)
        for n in rheads:
            if n in nm:
                if n not in scommon:
                    common.append(n)
        # every remote head is already common: nothing left to fetch
        if set(rheads).issubset(set(common)):
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1453
1462
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads

    if streaming:
        # stream clone replaces the changegroup/phases transfer entirely
        kwargs['cg'] = False
        kwargs['stream'] = True
        pullop.stepsdone.add('changegroup')
        pullop.stepsdone.add('phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add('changegroup')

        kwargs['cg'] = pullop.fetch

        # prefer the binary 'phases' part when the server supports it and
        # the legacy exchange is not forced via devel config
        legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
        hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
        if (not legacyphase and hasbinaryphase):
            kwargs['phases'] = True
            pullop.stepsdone.add('phases')

        if 'listkeys' in pullop.remotebundle2caps:
            if 'phases' not in pullop.stepsdone:
                # fall back to phase exchange over listkeys
                kwargs['listkeys'] = ['phases']

    bookmarksrequested = False
    legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
    hasbinarybook = 'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        # bookmark data already fetched (e.g. by the bundle1 discovery step)
        pullop.stepsdone.add('request-bookmarks')

    if ('request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark and hasbinarybook):
        kwargs['bookmarks'] = True
        bookmarksrequested = True

    if 'listkeys' in pullop.remotebundle2caps:
        if 'request-bookmarks' not in pullop.stepsdone:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add('request-bookmarks')
            kwargs.setdefault('listkeys', []).append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)
    bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
    try:
        op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction)
        # record bookmark parts instead of applying them directly; they
        # are processed from op.records below
        op.modes['bookmarks'] = 'records'
        bundle2.processbundle(pullop.repo, bundle, op=op)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        # bookmarks came back as a binary bundle2 part
        books = {}
        for record in op.records['bookmarks']:
            books[record['bookmark']] = record["node"]
        pullop.remotebookmarks = books
    else:
        # bookmarks came back (if at all) via the listkeys reply
        for namespace, value in op.records['listkeys']:
            if namespace == 'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1569
1578
1570 def _pullbundle2extraprepare(pullop, kwargs):
1579 def _pullbundle2extraprepare(pullop, kwargs):
1571 """hook function so that extensions can extend the getbundle call"""
1580 """hook function so that extensions can extend the getbundle call"""
1572
1581
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Selects the best transfer command the remote supports (getbundle,
    changegroup or changegroupsubset) and applies the result."""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        # old remote, full pull only
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1607
1616
def _pullphase(pullop):
    # Fetch phase data from the remote over the legacy listkeys protocol
    # and apply it, unless a previous step already handled phases.
    if 'phases' in pullop.stepsdone:
        return
    _pullapplyphases(pullop, pullop.remote.listkeys('phases'))
1614
1623
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state

    ``remotephases`` is the listkeys-style mapping received from the remote.
    Local phases only ever advance (towards public/draft), never retreat."""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        # only open a transaction when there is something to move
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1649
1658
def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if 'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add('bookmarks')
    localrepo = pullop.repo
    bookmod.updatefromremote(localrepo.ui, localrepo,
                             pullop.remotebookmarks,
                             pullop.remote.url(),
                             pullop.gettransaction,
                             explicit=pullop.explicitbookmarks)
1661
1670
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        # only open a transaction when the remote actually has marker data;
        # marker payloads live under 'dump0', 'dump1', ... keys
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr
1689
1698
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1696
1705
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
1704
1713
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is recorded in the step -> function mapping and
    the step name is appended to the ordered list of steps — or inserted at
    position ``idx`` when given. Decoration order may therefore matter.

    Only use this decorator for new steps; to wrap a step provided by an
    extension, modify the getbundle2partsmapping dictionary directly."""
    def register(func):
        # refuse double registration of the same step name
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func
    return register
1723
1732
def bundle2requested(bundlecaps):
    """Return True if *bundlecaps* advertises any bundle2 capability.

    A ``None`` or empty capability set means bundle2 was not requested."""
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1728
1737
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        # bundle10 has no equivalent for the extra bundle2 arguments
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        info['bundleversion'] = 1
        return info, changegroup.makestream(repo, outgoing, '01', source,
                                            bundlecaps=bundlecaps)

    # bundle20 case
    # (usebundle2 implies bundlecaps is a non-None iterable here)
    info['bundleversion'] = 2
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # let every registered part generator contribute to the bundle, in order
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    info['prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()
1775
1784
@getbundle2partsgenerator('stream2')
def _getbundlestream2(bundler, repo, source, bundlecaps=None,
                      b2caps=None, heads=None, common=None, **kwargs):
    """add a 'stream2' part to the requested bundle, when stream was asked"""
    if not kwargs.get('stream', False):
        return

    if not streamclone.allowservergeneration(repo):
        raise error.Abort(_('stream data requested but server does not allow '
                            'this feature'),
                          hint=_('well-behaved clients should not be '
                                 'requesting stream data from servers not '
                                 'advertising it; the client may be buggy'))

    # Stream clones don't compress well. And compression undermines a
    # goal of stream clones, which is to be fast. Communicate the desire
    # to avoid compression to consumers of the bundle.
    bundler.prefercompressed = False

    filecount, bytecount, it = streamclone.generatev2(repo)
    requirements = _formatrequirementsspec(repo.requirements)
    part = bundler.newpart('stream2', data=it)
    part.addparam('bytecount', '%d' % bytecount, mandatory=True)
    part.addparam('filecount', '%d' % filecount, mandatory=True)
    part.addparam('requirements', requirements, mandatory=True)
1800
1809
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cgstream = None
    if kwargs.get(r'cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            supported = changegroup.supportedoutgoingversions(repo)
            cgversions = [v for v in cgversions if v in supported]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        if outgoing.missing:
            cgstream = changegroup.makestream(repo, outgoing, version, source,
                                              bundlecaps=bundlecaps)

    # Nothing to send (changegroup suppressed or no missing changesets).
    if not cgstream:
        return

    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    cgpart.addparam('nbchanges', '%d' % len(outgoing.missing),
                    mandatory=False)
    if 'treemanifest' in repo.requirements:
        cgpart.addparam('treemanifest', '1')
1829
1838
@getbundle2partsgenerator('bookmarks')
def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
                           b2caps=None, **kwargs):
    """add a bookmark part to the requested bundle"""
    if not kwargs.get(r'bookmarks', False):
        return
    if 'bookmarks' not in b2caps:
        raise ValueError(_('no common bookmarks exchange method'))
    # Encode all local bookmarks in binary form; only emit a part when
    # there is actually something to send.
    data = bookmod.binaryencode(bookmod.listbinbookmarks(repo))
    if data:
        bundler.newpart('bookmarks', data=data)
1842
1851
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    # One 'listkeys' part per requested pushkey namespace.
    for namespace in kwargs.get(r'listkeys', ()):
        keypart = bundler.newpart('listkeys')
        keypart.addparam('namespace', namespace)
        keys = repo.listkeys(namespace).items()
        keypart.data = pushkey.encodekeys(keys)
1853
1862
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get(r'obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # Markers relevant to the ancestors of the exchanged heads, in a
    # deterministic (sorted) order.
    subset = [c.node() for c in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1865
1874
@getbundle2partsgenerator('phases')
def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, **kwargs):
    """add phase heads part to the requested bundle"""
    if kwargs.get(r'phases', False):
        # 'heads' is the only phase exchange encoding handled here.
        if not 'heads' in b2caps.get('phases'):
            raise ValueError(_('no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            # a publishing repository exposes all exchanged heads as public
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phase for now)
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = 'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        # (one sorted list of heads per known phase, indexed by phase number)
        phasemapping = []
        for phase in phases.allphases:
            phasemapping.append(sorted(headsbyphase[phase]))

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart('phase-heads', data=phasedata)
1912
1921
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Only send when changesets are being exchanged AND the client
    # advertises support for the part.
    if not kwargs.get(r'cg', True):
        return
    if 'hgtagsfnodes' not in b2caps:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1932
1941
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    currentheads = repo.heads()
    currenthash = hashlib.sha1(''.join(sorted(currentheads))).digest()
    uptodate = (their_heads == ['force'] or
                their_heads == currentheads or
                their_heads == ['hashed', currenthash])
    if not uptodate:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced('repository changed while %s - '
                              'please try again' % context)
1946
1955
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call wil be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                # Locks and transaction are created lazily, on first use,
                # so read-only bundle2 streams stay lock-free.
                def gettransaction():
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    # Capture the reply bundle even when processing raised;
                    # buffered ui output will be attached to it on the way
                    # out (see the 'finally' at function level).
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # Tag the exception so callers know it happened during
                # bundle2 processing, and salvage any reply output.
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        # Release in reverse acquisition order: tr, lock, wlock.
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
2019
2028
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""

    repo = pullop.repo
    remote = pullop.remote

    # Feature disabled via config.
    if not repo.ui.configbool('ui', 'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    # Specific heads were requested; skip (a pre-generated bundle
    # presumably covers the whole repo - clone bundles only apply to
    # full clones).
    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    res = remote._call('clonebundles')

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(
        repo, entries, streamclonerequested=pullop.streamclonerequested)

    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    # Try only the top-ranked entry; on failure we abort (or fall back,
    # when configured) rather than walking the list.
    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
2085
2094
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(repo, value,
                                                            externalnames=True)
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version
                except error.InvalidBundleSpecification:
                    # unparseable spec: keep the raw attribute only
                    pass
                except error.UnsupportedBundleSpecification:
                    pass

        entries.append(attrs)

    return entries
2121
2130
def filterclonebundleentries(repo, entries, streamclonerequested=False):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    compatible = []
    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                comp, version, params = parsebundlespec(repo, spec, strict=True)

                # If a stream clone was requested, filter out non-streamclone
                # entries.
                if streamclonerequested and (comp != 'UN' or version != 's1'):
                    repo.ui.debug('filtering %s because not a stream clone\n' %
                                  entry['URL'])
                    continue

            except error.InvalidBundleSpecification as e:
                repo.ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (
                                  entry['URL'], util.forcebytestr(e)))
                continue
        # If we don't have a spec and requested a stream clone, we don't know
        # what the entry is so don't attempt to apply it.
        elif streamclonerequested:
            repo.ui.debug('filtering %s because cannot determine if a stream '
                          'clone bundle\n' % entry['URL'])
            continue

        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        compatible.append(entry)

    return compatible
2169
2178
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk preferences in order; the first attribute that
        # distinguishes the two entries decides the ordering.
        for attr, wanted in self.prefers:
            ours = self.value.get(attr)
            theirs = other.value.get(attr)

            # An exact match on one side beats a missing attribute on
            # the other side.
            if ours is not None and theirs is None and ours == wanted:
                return -1
            if theirs is not None and ours is None and theirs == wanted:
                return 1

            # A missing attribute on either side - or identical values -
            # cannot break the tie; move on to the next preference.
            if ours is None or theirs is None or ours == theirs:
                continue

            # Both present and different: exact matches sort first.
            if ours == wanted:
                return -1
            if theirs == wanted:
                return 1

        # No preference distinguished the entries; preserve manifest order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0
2233
2242
def sortclonebundleentries(ui, entries):
    """Order manifest entries by the user's ``ui.clonebundleprefers``."""
    prefers = ui.configlist('ui', 'clonebundleprefers')
    # Without preferences, manifest order is preserved (fresh list).
    if not prefers:
        return list(entries)

    prefers = [p.split('=', 1) for p in prefers]

    ranked = sorted(clonebundleentry(e, prefers) for e in entries)
    return [r.value for r in ranked]
2243
2252
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL."""
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            fileobj = urlmod.open(ui, url)
            bundle = readbundle(ui, fileobj, 'stream')

            # Stream clone bundles apply themselves; everything else goes
            # through the regular bundle2 machinery.
            if isinstance(bundle, streamclone.streamcloneapplier):
                bundle.apply(repo)
            else:
                bundle2.applybundle(repo, bundle, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') %
                    util.forcebytestr(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') %
                    util.forcebytestr(e.reason))

    # Fetch/apply failed; signal the caller so it can fall back or abort.
    return False
General Comments 0
You need to be logged in to leave comments. Login now