##// END OF EJS Templates
exchange: drop now-unnecessary "local" from lock name variables...
Martin von Zweigbergk -
r33788:20d663a1 default
parent child Browse files
Show More
@@ -1,2011 +1,2011 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 bookmarks as bookmod,
19 bookmarks as bookmod,
20 bundle2,
20 bundle2,
21 changegroup,
21 changegroup,
22 discovery,
22 discovery,
23 error,
23 error,
24 lock as lockmod,
24 lock as lockmod,
25 obsolete,
25 obsolete,
26 phases,
26 phases,
27 pushkey,
27 pushkey,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 url as urlmod,
32 url as urlmod,
33 util,
33 util,
34 )
34 )
35
35
36 urlerr = util.urlerr
36 urlerr = util.urlerr
37 urlreq = util.urlreq
37 urlreq = util.urlreq
38
38
39 # Maps bundle version human names to changegroup versions.
39 # Maps bundle version human names to changegroup versions.
40 _bundlespeccgversions = {'v1': '01',
40 _bundlespeccgversions = {'v1': '01',
41 'v2': '02',
41 'v2': '02',
42 'packed1': 's1',
42 'packed1': 's1',
43 'bundle2': '02', #legacy
43 'bundle2': '02', #legacy
44 }
44 }
45
45
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
48
48
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format whose
    meaning must stay stable over time so newer clients keep producing
    bundles readable by older ones. The accepted form is::

        <compression>-<type>[;<parameter0>[;<parameter1>]]

    ``<compression>`` is one of the supported compression engine names and
    ``<type>`` is (currently) a version string. Text after a ";" is parsed
    as URI-encoded, ";"-delimited key=value pairs.

    If ``strict`` is True (the default) ``<compression>`` is required,
    otherwise it is optional. If ``externalnames`` is False (the default),
    human-centric names are converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters); compression is
    ``None`` when not in strict mode and no compression was given.

    Raises ``InvalidBundleSpecification`` for syntactically malformed input
    and ``UnsupportedBundleSpecification`` for unknown compression engines
    or bundle types/versions.
    """
    def parseparams(s):
        # Split "<version>;k0=v0;k1=v1" into the version and a params dict.
        if ';' not in s:
            return s, {}

        version, paramstr = s.split(';', 1)
        params = {}
        for pair in paramstr.split(';'):
            if '=' not in pair:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % pair)
            name, rawvalue = pair.split('=', 1)
            # Both halves are URI encoded.
            params[urlreq.unquote(name)] = urlreq.unquote(rawvalue)
        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-qualified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            compression = 'none' if spec == 'packed1' else 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Map human names to internal changegroup/compression identifiers.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
170
170
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler object for the bundle readable from ``fh``.

    ``fname`` is used for error messages (falling back to "stream" when
    absent); when ``vfs`` is given and a name is present, the name is
    joined against the vfs root. Dispatches on the 4-byte magic header to
    a cg1 unpacker, a bundle2 unbundler, or a streamclone applier.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        # A headerless stream starting with NUL is a raw, uncompressed
        # changegroup: re-prepend the consumed bytes and treat it as HG10UN.
        if not header.startswith('HG') and header.startswith('\0'):
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        # Bundle1: the compression marker follows the magic, unless we
        # already know it from the headerless fixup above.
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    if version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    if version == 'S1':
        return streamclone.streamcloneapplier(fh)
    raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
198
198
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal compression type to its bundlespec name, or None.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp

    if isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        # Walk the parts to find the changegroup version actually used.
        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)

    if isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)

    raise error.Abort(_('unknown bundle type: %s') % b)
251
251
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if common:
        # Drop nodes the local changelog doesn't know about.
        common = [n for n in common if cl.hasnode(n)]
    else:
        common = [nullid]
    return discovery.outgoing(repo, common, heads or cl.heads())
270
270
def _forcebundle1(op):
    """return true if a pull/push must use bundle1

    This function is used to allow testing of the older bundle version"""
    ui = op.repo.ui
    # The goal of this config is to let developers choose the bundle
    # version used during exchange. This is especially handy during tests.
    # Value is a list of bundle versions to pick from; the highest listed
    # version should be used.
    #
    # developer config: devel.legacy.exchange
    exchange = ui.configlist('devel', 'legacy.exchange')
    forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
    return forcebundle1 or not op.remote.capable('bundle2')
286
286
class pushoperation(object):
    """An object carrying the state of a single push operation.

    One instance is created at the beginning of each push and discarded
    afterward; it holds both the caller-supplied parameters and the
    intermediate results produced by the various push steps.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # steps already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branches pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # We want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # While trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # So we can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nodemap = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nodemap[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(ctx.node() for ctx in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        return self.futureheads if self.cgresult else self.fallbackheads

    # mapping of message used when pushing bookmark
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
409
409
410
410
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **(opargs or {}))
    localpeer = pushop.remote.local()
    if localpeer:
        missing = set(pushop.repo.requirements) - localpeer.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    locked = False
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        locked = True
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        pushop.ui.debug('cannot lock source repository: %s\n' % err)
    try:
        if locked:
            pushop.trmanager = transactionmanager(pushop.repo,
                                                  'push-response',
                                                  pushop.remote.url())
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

        if pushop.trmanager:
            pushop.trmanager.close()
    finally:
        if pushop.trmanager:
            pushop.trmanager.release()
        if lock is not None:
            lock.release()
        if wlock is not None:
            wlock.release()

    return pushop
485
485
486 # list of steps to perform discovery before push
486 # list of steps to perform discovery before push
487 pushdiscoveryorder = []
487 pushdiscoveryorder = []
488
488
489 # Mapping between step name and function
489 # Mapping between step name and function
490 #
490 #
491 # This exists to help extensions wrap steps if necessary
491 # This exists to help extensions wrap steps if necessary
492 pushdiscoverymapping = {}
492 pushdiscoverymapping = {}
493
493
494 def pushdiscovery(stepname):
494 def pushdiscovery(stepname):
495 """decorator for function performing discovery before push
495 """decorator for function performing discovery before push
496
496
497 The function is added to the step -> function mapping and appended to the
497 The function is added to the step -> function mapping and appended to the
498 list of steps. Beware that decorated function will be added in order (this
498 list of steps. Beware that decorated function will be added in order (this
499 may matter).
499 may matter).
500
500
501 You can only use this decorator for a new step, if you want to wrap a step
501 You can only use this decorator for a new step, if you want to wrap a step
502 from an extension, change the pushdiscovery dictionary directly."""
502 from an extension, change the pushdiscovery dictionary directly."""
503 def dec(func):
503 def dec(func):
504 assert stepname not in pushdiscoverymapping
504 assert stepname not in pushdiscoverymapping
505 pushdiscoverymapping[stepname] = func
505 pushdiscoverymapping[stepname] = func
506 pushdiscoveryorder.append(stepname)
506 pushdiscoveryorder.append(stepname)
507 return func
507 return func
508 return dec
508 return dec
509
509
510 def _pushdiscovery(pushop):
510 def _pushdiscovery(pushop):
511 """Run all discovery steps"""
511 """Run all discovery steps"""
512 for stepname in pushdiscoveryorder:
512 for stepname in pushdiscoveryorder:
513 step = pushdiscoverymapping[stepname]
513 step = pushdiscoverymapping[stepname]
514 step(pushop)
514 step(pushop)
515
515
516 @pushdiscovery('changeset')
516 @pushdiscovery('changeset')
517 def _pushdiscoverychangeset(pushop):
517 def _pushdiscoverychangeset(pushop):
518 """discover the changeset that need to be pushed"""
518 """discover the changeset that need to be pushed"""
519 fci = discovery.findcommonincoming
519 fci = discovery.findcommonincoming
520 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
520 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
521 common, inc, remoteheads = commoninc
521 common, inc, remoteheads = commoninc
522 fco = discovery.findcommonoutgoing
522 fco = discovery.findcommonoutgoing
523 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
523 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
524 commoninc=commoninc, force=pushop.force)
524 commoninc=commoninc, force=pushop.force)
525 pushop.outgoing = outgoing
525 pushop.outgoing = outgoing
526 pushop.remoteheads = remoteheads
526 pushop.remoteheads = remoteheads
527 pushop.incoming = inc
527 pushop.incoming = inc
528
528
529 @pushdiscovery('phase')
529 @pushdiscovery('phase')
530 def _pushdiscoveryphase(pushop):
530 def _pushdiscoveryphase(pushop):
531 """discover the phase that needs to be pushed
531 """discover the phase that needs to be pushed
532
532
533 (computed for both success and failure case for changesets push)"""
533 (computed for both success and failure case for changesets push)"""
534 outgoing = pushop.outgoing
534 outgoing = pushop.outgoing
535 unfi = pushop.repo.unfiltered()
535 unfi = pushop.repo.unfiltered()
536 remotephases = pushop.remote.listkeys('phases')
536 remotephases = pushop.remote.listkeys('phases')
537 publishing = remotephases.get('publishing', False)
537 publishing = remotephases.get('publishing', False)
538 if (pushop.ui.configbool('ui', '_usedassubrepo')
538 if (pushop.ui.configbool('ui', '_usedassubrepo')
539 and remotephases # server supports phases
539 and remotephases # server supports phases
540 and not pushop.outgoing.missing # no changesets to be pushed
540 and not pushop.outgoing.missing # no changesets to be pushed
541 and publishing):
541 and publishing):
542 # When:
542 # When:
543 # - this is a subrepo push
543 # - this is a subrepo push
544 # - and remote support phase
544 # - and remote support phase
545 # - and no changeset are to be pushed
545 # - and no changeset are to be pushed
546 # - and remote is publishing
546 # - and remote is publishing
547 # We may be in issue 3871 case!
547 # We may be in issue 3871 case!
548 # We drop the possible phase synchronisation done by
548 # We drop the possible phase synchronisation done by
549 # courtesy to publish changesets possibly locally draft
549 # courtesy to publish changesets possibly locally draft
550 # on the remote.
550 # on the remote.
551 remotephases = {'publishing': 'True'}
551 remotephases = {'publishing': 'True'}
552 ana = phases.analyzeremotephases(pushop.repo,
552 ana = phases.analyzeremotephases(pushop.repo,
553 pushop.fallbackheads,
553 pushop.fallbackheads,
554 remotephases)
554 remotephases)
555 pheads, droots = ana
555 pheads, droots = ana
556 extracond = ''
556 extracond = ''
557 if not publishing:
557 if not publishing:
558 extracond = ' and public()'
558 extracond = ' and public()'
559 revset = 'heads((%%ln::%%ln) %s)' % extracond
559 revset = 'heads((%%ln::%%ln) %s)' % extracond
560 # Get the list of all revs draft on remote by public here.
560 # Get the list of all revs draft on remote by public here.
561 # XXX Beware that revset break if droots is not strictly
561 # XXX Beware that revset break if droots is not strictly
562 # XXX root we may want to ensure it is but it is costly
562 # XXX root we may want to ensure it is but it is costly
563 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
563 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
564 if not outgoing.missing:
564 if not outgoing.missing:
565 future = fallback
565 future = fallback
566 else:
566 else:
567 # adds changeset we are going to push as draft
567 # adds changeset we are going to push as draft
568 #
568 #
569 # should not be necessary for publishing server, but because of an
569 # should not be necessary for publishing server, but because of an
570 # issue fixed in xxxxx we have to do it anyway.
570 # issue fixed in xxxxx we have to do it anyway.
571 fdroots = list(unfi.set('roots(%ln + %ln::)',
571 fdroots = list(unfi.set('roots(%ln + %ln::)',
572 outgoing.missing, droots))
572 outgoing.missing, droots))
573 fdroots = [f.node() for f in fdroots]
573 fdroots = [f.node() for f in fdroots]
574 future = list(unfi.set(revset, fdroots, pushop.futureheads))
574 future = list(unfi.set(revset, fdroots, pushop.futureheads))
575 pushop.outdatedphases = future
575 pushop.outdatedphases = future
576 pushop.fallbackoutdatedphases = fallback
576 pushop.fallbackoutdatedphases = fallback
577
577
578 @pushdiscovery('obsmarker')
578 @pushdiscovery('obsmarker')
579 def _pushdiscoveryobsmarkers(pushop):
579 def _pushdiscoveryobsmarkers(pushop):
580 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
580 if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt)
581 and pushop.repo.obsstore
581 and pushop.repo.obsstore
582 and 'obsolete' in pushop.remote.listkeys('namespaces')):
582 and 'obsolete' in pushop.remote.listkeys('namespaces')):
583 repo = pushop.repo
583 repo = pushop.repo
584 # very naive computation, that can be quite expensive on big repo.
584 # very naive computation, that can be quite expensive on big repo.
585 # However: evolution is currently slow on them anyway.
585 # However: evolution is currently slow on them anyway.
586 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
586 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
587 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
587 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
588
588
589 @pushdiscovery('bookmarks')
589 @pushdiscovery('bookmarks')
590 def _pushdiscoverybookmarks(pushop):
590 def _pushdiscoverybookmarks(pushop):
591 ui = pushop.ui
591 ui = pushop.ui
592 repo = pushop.repo.unfiltered()
592 repo = pushop.repo.unfiltered()
593 remote = pushop.remote
593 remote = pushop.remote
594 ui.debug("checking for updated bookmarks\n")
594 ui.debug("checking for updated bookmarks\n")
595 ancestors = ()
595 ancestors = ()
596 if pushop.revs:
596 if pushop.revs:
597 revnums = map(repo.changelog.rev, pushop.revs)
597 revnums = map(repo.changelog.rev, pushop.revs)
598 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
598 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
599 remotebookmark = remote.listkeys('bookmarks')
599 remotebookmark = remote.listkeys('bookmarks')
600
600
601 explicit = set([repo._bookmarks.expandname(bookmark)
601 explicit = set([repo._bookmarks.expandname(bookmark)
602 for bookmark in pushop.bookmarks])
602 for bookmark in pushop.bookmarks])
603
603
604 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
604 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
605 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
605 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
606
606
607 def safehex(x):
607 def safehex(x):
608 if x is None:
608 if x is None:
609 return x
609 return x
610 return hex(x)
610 return hex(x)
611
611
612 def hexifycompbookmarks(bookmarks):
612 def hexifycompbookmarks(bookmarks):
613 for b, scid, dcid in bookmarks:
613 for b, scid, dcid in bookmarks:
614 yield b, safehex(scid), safehex(dcid)
614 yield b, safehex(scid), safehex(dcid)
615
615
616 comp = [hexifycompbookmarks(marks) for marks in comp]
616 comp = [hexifycompbookmarks(marks) for marks in comp]
617 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
617 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
618
618
619 for b, scid, dcid in advsrc:
619 for b, scid, dcid in advsrc:
620 if b in explicit:
620 if b in explicit:
621 explicit.remove(b)
621 explicit.remove(b)
622 if not ancestors or repo[scid].rev() in ancestors:
622 if not ancestors or repo[scid].rev() in ancestors:
623 pushop.outbookmarks.append((b, dcid, scid))
623 pushop.outbookmarks.append((b, dcid, scid))
624 # search added bookmark
624 # search added bookmark
625 for b, scid, dcid in addsrc:
625 for b, scid, dcid in addsrc:
626 if b in explicit:
626 if b in explicit:
627 explicit.remove(b)
627 explicit.remove(b)
628 pushop.outbookmarks.append((b, '', scid))
628 pushop.outbookmarks.append((b, '', scid))
629 # search for overwritten bookmark
629 # search for overwritten bookmark
630 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
630 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
631 if b in explicit:
631 if b in explicit:
632 explicit.remove(b)
632 explicit.remove(b)
633 pushop.outbookmarks.append((b, dcid, scid))
633 pushop.outbookmarks.append((b, dcid, scid))
634 # search for bookmark to delete
634 # search for bookmark to delete
635 for b, scid, dcid in adddst:
635 for b, scid, dcid in adddst:
636 if b in explicit:
636 if b in explicit:
637 explicit.remove(b)
637 explicit.remove(b)
638 # treat as "deleted locally"
638 # treat as "deleted locally"
639 pushop.outbookmarks.append((b, dcid, ''))
639 pushop.outbookmarks.append((b, dcid, ''))
640 # identical bookmarks shouldn't get reported
640 # identical bookmarks shouldn't get reported
641 for b, scid, dcid in same:
641 for b, scid, dcid in same:
642 if b in explicit:
642 if b in explicit:
643 explicit.remove(b)
643 explicit.remove(b)
644
644
645 if explicit:
645 if explicit:
646 explicit = sorted(explicit)
646 explicit = sorted(explicit)
647 # we should probably list all of them
647 # we should probably list all of them
648 ui.warn(_('bookmark %s does not exist on the local '
648 ui.warn(_('bookmark %s does not exist on the local '
649 'or remote repository!\n') % explicit[0])
649 'or remote repository!\n') % explicit[0])
650 pushop.bkresult = 2
650 pushop.bkresult = 2
651
651
652 pushop.outbookmarks.sort()
652 pushop.outbookmarks.sort()
653
653
654 def _pushcheckoutgoing(pushop):
654 def _pushcheckoutgoing(pushop):
655 outgoing = pushop.outgoing
655 outgoing = pushop.outgoing
656 unfi = pushop.repo.unfiltered()
656 unfi = pushop.repo.unfiltered()
657 if not outgoing.missing:
657 if not outgoing.missing:
658 # nothing to push
658 # nothing to push
659 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
659 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
660 return False
660 return False
661 # something to push
661 # something to push
662 if not pushop.force:
662 if not pushop.force:
663 # if repo.obsstore == False --> no obsolete
663 # if repo.obsstore == False --> no obsolete
664 # then, save the iteration
664 # then, save the iteration
665 if unfi.obsstore:
665 if unfi.obsstore:
666 # this message are here for 80 char limit reason
666 # this message are here for 80 char limit reason
667 mso = _("push includes obsolete changeset: %s!")
667 mso = _("push includes obsolete changeset: %s!")
668 mspd = _("push includes phase-divergent changeset: %s!")
668 mspd = _("push includes phase-divergent changeset: %s!")
669 mscd = _("push includes content-divergent changeset: %s!")
669 mscd = _("push includes content-divergent changeset: %s!")
670 mst = {"orphan": _("push includes orphan changeset: %s!"),
670 mst = {"orphan": _("push includes orphan changeset: %s!"),
671 "phase-divergent": mspd,
671 "phase-divergent": mspd,
672 "content-divergent": mscd}
672 "content-divergent": mscd}
673 # If we are to push if there is at least one
673 # If we are to push if there is at least one
674 # obsolete or unstable changeset in missing, at
674 # obsolete or unstable changeset in missing, at
675 # least one of the missinghead will be obsolete or
675 # least one of the missinghead will be obsolete or
676 # unstable. So checking heads only is ok
676 # unstable. So checking heads only is ok
677 for node in outgoing.missingheads:
677 for node in outgoing.missingheads:
678 ctx = unfi[node]
678 ctx = unfi[node]
679 if ctx.obsolete():
679 if ctx.obsolete():
680 raise error.Abort(mso % ctx)
680 raise error.Abort(mso % ctx)
681 elif ctx.isunstable():
681 elif ctx.isunstable():
682 # TODO print more than one instability in the abort
682 # TODO print more than one instability in the abort
683 # message
683 # message
684 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
684 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
685
685
686 discovery.checkheads(pushop)
686 discovery.checkheads(pushop)
687 return True
687 return True
688
688
689 # List of names of steps to perform for an outgoing bundle2, order matters.
689 # List of names of steps to perform for an outgoing bundle2, order matters.
690 b2partsgenorder = []
690 b2partsgenorder = []
691
691
692 # Mapping between step name and function
692 # Mapping between step name and function
693 #
693 #
694 # This exists to help extensions wrap steps if necessary
694 # This exists to help extensions wrap steps if necessary
695 b2partsgenmapping = {}
695 b2partsgenmapping = {}
696
696
697 def b2partsgenerator(stepname, idx=None):
697 def b2partsgenerator(stepname, idx=None):
698 """decorator for function generating bundle2 part
698 """decorator for function generating bundle2 part
699
699
700 The function is added to the step -> function mapping and appended to the
700 The function is added to the step -> function mapping and appended to the
701 list of steps. Beware that decorated functions will be added in order
701 list of steps. Beware that decorated functions will be added in order
702 (this may matter).
702 (this may matter).
703
703
704 You can only use this decorator for new steps, if you want to wrap a step
704 You can only use this decorator for new steps, if you want to wrap a step
705 from an extension, attack the b2partsgenmapping dictionary directly."""
705 from an extension, attack the b2partsgenmapping dictionary directly."""
706 def dec(func):
706 def dec(func):
707 assert stepname not in b2partsgenmapping
707 assert stepname not in b2partsgenmapping
708 b2partsgenmapping[stepname] = func
708 b2partsgenmapping[stepname] = func
709 if idx is None:
709 if idx is None:
710 b2partsgenorder.append(stepname)
710 b2partsgenorder.append(stepname)
711 else:
711 else:
712 b2partsgenorder.insert(idx, stepname)
712 b2partsgenorder.insert(idx, stepname)
713 return func
713 return func
714 return dec
714 return dec
715
715
716 def _pushb2ctxcheckheads(pushop, bundler):
716 def _pushb2ctxcheckheads(pushop, bundler):
717 """Generate race condition checking parts
717 """Generate race condition checking parts
718
718
719 Exists as an independent function to aid extensions
719 Exists as an independent function to aid extensions
720 """
720 """
721 # * 'force' do not check for push race,
721 # * 'force' do not check for push race,
722 # * if we don't push anything, there are nothing to check.
722 # * if we don't push anything, there are nothing to check.
723 if not pushop.force and pushop.outgoing.missingheads:
723 if not pushop.force and pushop.outgoing.missingheads:
724 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
724 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
725 emptyremote = pushop.pushbranchmap is None
725 emptyremote = pushop.pushbranchmap is None
726 if not allowunrelated or emptyremote:
726 if not allowunrelated or emptyremote:
727 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
727 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
728 else:
728 else:
729 affected = set()
729 affected = set()
730 for branch, heads in pushop.pushbranchmap.iteritems():
730 for branch, heads in pushop.pushbranchmap.iteritems():
731 remoteheads, newheads, unsyncedheads, discardedheads = heads
731 remoteheads, newheads, unsyncedheads, discardedheads = heads
732 if remoteheads is not None:
732 if remoteheads is not None:
733 remote = set(remoteheads)
733 remote = set(remoteheads)
734 affected |= set(discardedheads) & remote
734 affected |= set(discardedheads) & remote
735 affected |= remote - set(newheads)
735 affected |= remote - set(newheads)
736 if affected:
736 if affected:
737 data = iter(sorted(affected))
737 data = iter(sorted(affected))
738 bundler.newpart('check:updated-heads', data=data)
738 bundler.newpart('check:updated-heads', data=data)
739
739
740 @b2partsgenerator('changeset')
740 @b2partsgenerator('changeset')
741 def _pushb2ctx(pushop, bundler):
741 def _pushb2ctx(pushop, bundler):
742 """handle changegroup push through bundle2
742 """handle changegroup push through bundle2
743
743
744 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
744 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
745 """
745 """
746 if 'changesets' in pushop.stepsdone:
746 if 'changesets' in pushop.stepsdone:
747 return
747 return
748 pushop.stepsdone.add('changesets')
748 pushop.stepsdone.add('changesets')
749 # Send known heads to the server for race detection.
749 # Send known heads to the server for race detection.
750 if not _pushcheckoutgoing(pushop):
750 if not _pushcheckoutgoing(pushop):
751 return
751 return
752 pushop.repo.prepushoutgoinghooks(pushop)
752 pushop.repo.prepushoutgoinghooks(pushop)
753
753
754 _pushb2ctxcheckheads(pushop, bundler)
754 _pushb2ctxcheckheads(pushop, bundler)
755
755
756 b2caps = bundle2.bundle2caps(pushop.remote)
756 b2caps = bundle2.bundle2caps(pushop.remote)
757 version = '01'
757 version = '01'
758 cgversions = b2caps.get('changegroup')
758 cgversions = b2caps.get('changegroup')
759 if cgversions: # 3.1 and 3.2 ship with an empty value
759 if cgversions: # 3.1 and 3.2 ship with an empty value
760 cgversions = [v for v in cgversions
760 cgversions = [v for v in cgversions
761 if v in changegroup.supportedoutgoingversions(
761 if v in changegroup.supportedoutgoingversions(
762 pushop.repo)]
762 pushop.repo)]
763 if not cgversions:
763 if not cgversions:
764 raise ValueError(_('no common changegroup version'))
764 raise ValueError(_('no common changegroup version'))
765 version = max(cgversions)
765 version = max(cgversions)
766 cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
766 cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
767 pushop.outgoing,
767 pushop.outgoing,
768 version=version)
768 version=version)
769 cgpart = bundler.newpart('changegroup', data=cg)
769 cgpart = bundler.newpart('changegroup', data=cg)
770 if cgversions:
770 if cgversions:
771 cgpart.addparam('version', version)
771 cgpart.addparam('version', version)
772 if 'treemanifest' in pushop.repo.requirements:
772 if 'treemanifest' in pushop.repo.requirements:
773 cgpart.addparam('treemanifest', '1')
773 cgpart.addparam('treemanifest', '1')
774 def handlereply(op):
774 def handlereply(op):
775 """extract addchangegroup returns from server reply"""
775 """extract addchangegroup returns from server reply"""
776 cgreplies = op.records.getreplies(cgpart.id)
776 cgreplies = op.records.getreplies(cgpart.id)
777 assert len(cgreplies['changegroup']) == 1
777 assert len(cgreplies['changegroup']) == 1
778 pushop.cgresult = cgreplies['changegroup'][0]['return']
778 pushop.cgresult = cgreplies['changegroup'][0]['return']
779 return handlereply
779 return handlereply
780
780
781 @b2partsgenerator('phase')
781 @b2partsgenerator('phase')
782 def _pushb2phases(pushop, bundler):
782 def _pushb2phases(pushop, bundler):
783 """handle phase push through bundle2"""
783 """handle phase push through bundle2"""
784 if 'phases' in pushop.stepsdone:
784 if 'phases' in pushop.stepsdone:
785 return
785 return
786 b2caps = bundle2.bundle2caps(pushop.remote)
786 b2caps = bundle2.bundle2caps(pushop.remote)
787 if not 'pushkey' in b2caps:
787 if not 'pushkey' in b2caps:
788 return
788 return
789 pushop.stepsdone.add('phases')
789 pushop.stepsdone.add('phases')
790 part2node = []
790 part2node = []
791
791
792 def handlefailure(pushop, exc):
792 def handlefailure(pushop, exc):
793 targetid = int(exc.partid)
793 targetid = int(exc.partid)
794 for partid, node in part2node:
794 for partid, node in part2node:
795 if partid == targetid:
795 if partid == targetid:
796 raise error.Abort(_('updating %s to public failed') % node)
796 raise error.Abort(_('updating %s to public failed') % node)
797
797
798 enc = pushkey.encode
798 enc = pushkey.encode
799 for newremotehead in pushop.outdatedphases:
799 for newremotehead in pushop.outdatedphases:
800 part = bundler.newpart('pushkey')
800 part = bundler.newpart('pushkey')
801 part.addparam('namespace', enc('phases'))
801 part.addparam('namespace', enc('phases'))
802 part.addparam('key', enc(newremotehead.hex()))
802 part.addparam('key', enc(newremotehead.hex()))
803 part.addparam('old', enc(str(phases.draft)))
803 part.addparam('old', enc(str(phases.draft)))
804 part.addparam('new', enc(str(phases.public)))
804 part.addparam('new', enc(str(phases.public)))
805 part2node.append((part.id, newremotehead))
805 part2node.append((part.id, newremotehead))
806 pushop.pkfailcb[part.id] = handlefailure
806 pushop.pkfailcb[part.id] = handlefailure
807
807
808 def handlereply(op):
808 def handlereply(op):
809 for partid, node in part2node:
809 for partid, node in part2node:
810 partrep = op.records.getreplies(partid)
810 partrep = op.records.getreplies(partid)
811 results = partrep['pushkey']
811 results = partrep['pushkey']
812 assert len(results) <= 1
812 assert len(results) <= 1
813 msg = None
813 msg = None
814 if not results:
814 if not results:
815 msg = _('server ignored update of %s to public!\n') % node
815 msg = _('server ignored update of %s to public!\n') % node
816 elif not int(results[0]['return']):
816 elif not int(results[0]['return']):
817 msg = _('updating %s to public failed!\n') % node
817 msg = _('updating %s to public failed!\n') % node
818 if msg is not None:
818 if msg is not None:
819 pushop.ui.warn(msg)
819 pushop.ui.warn(msg)
820 return handlereply
820 return handlereply
821
821
822 @b2partsgenerator('obsmarkers')
822 @b2partsgenerator('obsmarkers')
823 def _pushb2obsmarkers(pushop, bundler):
823 def _pushb2obsmarkers(pushop, bundler):
824 if 'obsmarkers' in pushop.stepsdone:
824 if 'obsmarkers' in pushop.stepsdone:
825 return
825 return
826 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
826 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
827 if obsolete.commonversion(remoteversions) is None:
827 if obsolete.commonversion(remoteversions) is None:
828 return
828 return
829 pushop.stepsdone.add('obsmarkers')
829 pushop.stepsdone.add('obsmarkers')
830 if pushop.outobsmarkers:
830 if pushop.outobsmarkers:
831 markers = sorted(pushop.outobsmarkers)
831 markers = sorted(pushop.outobsmarkers)
832 bundle2.buildobsmarkerspart(bundler, markers)
832 bundle2.buildobsmarkerspart(bundler, markers)
833
833
834 @b2partsgenerator('bookmarks')
834 @b2partsgenerator('bookmarks')
835 def _pushb2bookmarks(pushop, bundler):
835 def _pushb2bookmarks(pushop, bundler):
836 """handle bookmark push through bundle2"""
836 """handle bookmark push through bundle2"""
837 if 'bookmarks' in pushop.stepsdone:
837 if 'bookmarks' in pushop.stepsdone:
838 return
838 return
839 b2caps = bundle2.bundle2caps(pushop.remote)
839 b2caps = bundle2.bundle2caps(pushop.remote)
840 if 'pushkey' not in b2caps:
840 if 'pushkey' not in b2caps:
841 return
841 return
842 pushop.stepsdone.add('bookmarks')
842 pushop.stepsdone.add('bookmarks')
843 part2book = []
843 part2book = []
844 enc = pushkey.encode
844 enc = pushkey.encode
845
845
846 def handlefailure(pushop, exc):
846 def handlefailure(pushop, exc):
847 targetid = int(exc.partid)
847 targetid = int(exc.partid)
848 for partid, book, action in part2book:
848 for partid, book, action in part2book:
849 if partid == targetid:
849 if partid == targetid:
850 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
850 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
851 # we should not be called for part we did not generated
851 # we should not be called for part we did not generated
852 assert False
852 assert False
853
853
854 for book, old, new in pushop.outbookmarks:
854 for book, old, new in pushop.outbookmarks:
855 part = bundler.newpart('pushkey')
855 part = bundler.newpart('pushkey')
856 part.addparam('namespace', enc('bookmarks'))
856 part.addparam('namespace', enc('bookmarks'))
857 part.addparam('key', enc(book))
857 part.addparam('key', enc(book))
858 part.addparam('old', enc(old))
858 part.addparam('old', enc(old))
859 part.addparam('new', enc(new))
859 part.addparam('new', enc(new))
860 action = 'update'
860 action = 'update'
861 if not old:
861 if not old:
862 action = 'export'
862 action = 'export'
863 elif not new:
863 elif not new:
864 action = 'delete'
864 action = 'delete'
865 part2book.append((part.id, book, action))
865 part2book.append((part.id, book, action))
866 pushop.pkfailcb[part.id] = handlefailure
866 pushop.pkfailcb[part.id] = handlefailure
867
867
868 def handlereply(op):
868 def handlereply(op):
869 ui = pushop.ui
869 ui = pushop.ui
870 for partid, book, action in part2book:
870 for partid, book, action in part2book:
871 partrep = op.records.getreplies(partid)
871 partrep = op.records.getreplies(partid)
872 results = partrep['pushkey']
872 results = partrep['pushkey']
873 assert len(results) <= 1
873 assert len(results) <= 1
874 if not results:
874 if not results:
875 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
875 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
876 else:
876 else:
877 ret = int(results[0]['return'])
877 ret = int(results[0]['return'])
878 if ret:
878 if ret:
879 ui.status(bookmsgmap[action][0] % book)
879 ui.status(bookmsgmap[action][0] % book)
880 else:
880 else:
881 ui.warn(bookmsgmap[action][1] % book)
881 ui.warn(bookmsgmap[action][1] % book)
882 if pushop.bkresult is not None:
882 if pushop.bkresult is not None:
883 pushop.bkresult = 1
883 pushop.bkresult = 1
884 return handlereply
884 return handlereply
885
885
886 @b2partsgenerator('pushvars', idx=0)
886 @b2partsgenerator('pushvars', idx=0)
887 def _getbundlesendvars(pushop, bundler):
887 def _getbundlesendvars(pushop, bundler):
888 '''send shellvars via bundle2'''
888 '''send shellvars via bundle2'''
889 if getattr(pushop.repo, '_shellvars', ()):
889 if getattr(pushop.repo, '_shellvars', ()):
890 part = bundler.newpart('pushvars')
890 part = bundler.newpart('pushvars')
891
891
892 for key, value in pushop.repo._shellvars.iteritems():
892 for key, value in pushop.repo._shellvars.iteritems():
893 part.addparam(key, value, mandatory=False)
893 part.addparam(key, value, mandatory=False)
894
894
895 def _pushbundle2(pushop):
895 def _pushbundle2(pushop):
896 """push data to the remote using bundle2
896 """push data to the remote using bundle2
897
897
898 The only currently supported type of data is changegroup but this will
898 The only currently supported type of data is changegroup but this will
899 evolve in the future."""
899 evolve in the future."""
900 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
900 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
901 pushback = (pushop.trmanager
901 pushback = (pushop.trmanager
902 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
902 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
903
903
904 # create reply capability
904 # create reply capability
905 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
905 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
906 allowpushback=pushback))
906 allowpushback=pushback))
907 bundler.newpart('replycaps', data=capsblob)
907 bundler.newpart('replycaps', data=capsblob)
908 replyhandlers = []
908 replyhandlers = []
909 for partgenname in b2partsgenorder:
909 for partgenname in b2partsgenorder:
910 partgen = b2partsgenmapping[partgenname]
910 partgen = b2partsgenmapping[partgenname]
911 ret = partgen(pushop, bundler)
911 ret = partgen(pushop, bundler)
912 if callable(ret):
912 if callable(ret):
913 replyhandlers.append(ret)
913 replyhandlers.append(ret)
914 # do not push if nothing to push
914 # do not push if nothing to push
915 if bundler.nbparts <= 1:
915 if bundler.nbparts <= 1:
916 return
916 return
917 stream = util.chunkbuffer(bundler.getchunks())
917 stream = util.chunkbuffer(bundler.getchunks())
918 try:
918 try:
919 try:
919 try:
920 reply = pushop.remote.unbundle(
920 reply = pushop.remote.unbundle(
921 stream, ['force'], pushop.remote.url())
921 stream, ['force'], pushop.remote.url())
922 except error.BundleValueError as exc:
922 except error.BundleValueError as exc:
923 raise error.Abort(_('missing support for %s') % exc)
923 raise error.Abort(_('missing support for %s') % exc)
924 try:
924 try:
925 trgetter = None
925 trgetter = None
926 if pushback:
926 if pushback:
927 trgetter = pushop.trmanager.transaction
927 trgetter = pushop.trmanager.transaction
928 op = bundle2.processbundle(pushop.repo, reply, trgetter)
928 op = bundle2.processbundle(pushop.repo, reply, trgetter)
929 except error.BundleValueError as exc:
929 except error.BundleValueError as exc:
930 raise error.Abort(_('missing support for %s') % exc)
930 raise error.Abort(_('missing support for %s') % exc)
931 except bundle2.AbortFromPart as exc:
931 except bundle2.AbortFromPart as exc:
932 pushop.ui.status(_('remote: %s\n') % exc)
932 pushop.ui.status(_('remote: %s\n') % exc)
933 if exc.hint is not None:
933 if exc.hint is not None:
934 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
934 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
935 raise error.Abort(_('push failed on remote'))
935 raise error.Abort(_('push failed on remote'))
936 except error.PushkeyFailed as exc:
936 except error.PushkeyFailed as exc:
937 partid = int(exc.partid)
937 partid = int(exc.partid)
938 if partid not in pushop.pkfailcb:
938 if partid not in pushop.pkfailcb:
939 raise
939 raise
940 pushop.pkfailcb[partid](pushop, exc)
940 pushop.pkfailcb[partid](pushop, exc)
941 for rephand in replyhandlers:
941 for rephand in replyhandlers:
942 rephand(op)
942 rephand(op)
943
943
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) code path: builds a changegroup locally and sends
    it to the remote via the ``unbundle`` wire command.  ``pushop.cgresult``
    receives the remote's return value.  Idempotent per push operation via
    ``pushop.stepsdone``.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        # nothing outgoing (or the user declined); leave cgresult untouched
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                            or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
        cg = changegroup.getsubset(pushop.repo,
                                   outgoing,
                                   bundler,
                                   'push',
                                   fastpath=True)
    else:
        # excluded revs or filtered revs exist: take the general path that
        # recomputes the subset safely
        cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
                                        bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = ['force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
987
987
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Two directions: remote phase data is applied locally (publishing servers
    turn common heads public), then any locally-public heads the remote still
    considers draft are pushed out one by one via the legacy ``pushkey``
    command (unless bundle2 already handled the 'phases' step).
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = pushop.remote.listkeys('phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            # publishing server: everything common becomes public locally
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            # nothing was pushed; use the fallback computation
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            r = pushop.remote.pushkey('phases',
                                      newremotehead.hex(),
                                      str(phases.draft),
                                      str(phases.public))
            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1043
1043
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo

    Requires an open transaction (via ``pushop.trmanager``); without one the
    phases are left untouched and the user is told which update was skipped.
    """
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # repo is not locked, do not change any phases!
    # Informs the user that phases should have been moved when
    # applicable.
    wouldmove = [node for node in nodes
                 if phase < pushop.repo[node].phase()]
    phasename = phases.phasenames[phase]
    if wouldmove:
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasename)
1060
1060
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote

    Markers are escaped into pushkey-sized chunks and sent one pushkey call
    per chunk; a warning is emitted if any chunk is rejected.
    """
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    # reverse sort to ensure we end with dump0
    results = [remote.pushkey('obsolete', key, '', remotedata[key])
               for key in sorted(remotedata, reverse=True)]
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
1079
1079
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Each outgoing bookmark is sent with a single pushkey call; success and
    failure messages come from the module-level ``bookmsgmap`` table.
    """
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for name, oldnode, newnode in pushop.outbookmarks:
        if not oldnode:
            action = 'export'
        elif not newnode:
            action = 'delete'
        else:
            action = 'update'
        okmsg, failmsg = bookmsgmap[action]
        if remote.pushkey('bookmarks', name, oldnode, newnode):
            ui.status(okmsg % name)
        else:
            ui.warn(failmsg % name)
    # discovery can have set the value form invalid entry
    if pushop.bkresult is not None:
        pushop.bkresult = 1
1101
1101
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmarks pulled explicitly (names expanded through the local
        # bookmark store)
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager (set by pull() once locks are held)
        self.trmanager = None
        # set of common changesets between local and remote before pull
        self.common = None
        # set of pulled heads
        self.rheads = None
        # list of missing changesets to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # set of steps already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled everything possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # True unless configuration/capabilities force the bundle1 path
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # bundle2 capabilities advertised by the remote peer
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1172
1172
class transactionmanager(object):
    """An object to manage the life cycle of a transaction.

    The underlying repository transaction is created lazily, on the first
    call to transaction(); close() and release() are safe no-ops until
    then."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        # the live transaction, or None before first use
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            desc = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(desc)
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
1202
1202
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.

    Raises ``error.Abort`` when the local repo lacks features the remote
    requires.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested, **opargs)

    peerlocal = pullop.remote.local()
    if peerlocal:
        # pulling from a local repo: refuse early if it uses requirements
        # this repo cannot handle
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    wlock = lock = None
    try:
        # wlock before lock: standard Mercurial lock ordering
        wlock = pullop.repo.wlock()
        lock = pullop.repo.lock()
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
        _maybeapplyclonebundle(pullop)
        _pulldiscovery(pullop)
        if pullop.canusebundle2:
            _pullbundle2(pullop)
        # each _pull* step checks pullop.stepsdone, so those already handled
        # by bundle2 are skipped here
        _pullchangeset(pullop)
        _pullphase(pullop)
        _pullbookmarks(pullop)
        _pullobsolete(pullop)
        pullop.trmanager.close()
    finally:
        # releases (or rolls back) the transaction, then both locks
        lockmod.release(pullop.trmanager, lock, wlock)

    return pullop
1260
1260
# list of step names, in the order discovery must run before a pull
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
1268
1268
def pulldiscovery(stepname):
    """decorator for functions performing discovery before pull

    The decorated function is recorded in the step -> function mapping and
    its name appended to the ordered step list, so registration order is
    execution order (this may matter).

    Only use this decorator for a new step; to wrap a step from an
    extension, modify the pulldiscoverymapping dictionary directly."""
    def register(func):
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1284
1284
def _pulldiscovery(pullop):
    """Run every registered discovery step, in registration order."""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1290
1290
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in the bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is None:
        # all known bundle2 servers now support listkeys, but lets be nice
        # with new implementations: only rely on it when advertised
        viabundle2 = (pullop.canusebundle2
                      and 'listkeys' in pullop.remotebundle2caps)
        if not viabundle2:
            pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1304
1304
1305
1305
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will change to handle all
    discovery at some point.

    Fills in ``pullop.common``, ``pullop.fetch`` and ``pullop.rheads``.
    """
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    # nodemap of the *unfiltered* changelog: knows hidden changesets too
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, let's drop it from the
        # unknown remote heads and put it back in common.
        #
        # This is a hackish solution to catch most of the "common but
        # locally hidden" situations. We do not perform discovery on the
        # unfiltered repository because it ends up doing a pathological
        # amount of round trips for a huge amount of changesets we do not
        # care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but is not including a remote head, we'll not be able to
        # detect it.
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                # known locally (possibly hidden): treat as common
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            # every remote head was already known: nothing left to fetch
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1343
1343
1344 def _pullbundle2(pullop):
1344 def _pullbundle2(pullop):
1345 """pull data using bundle2
1345 """pull data using bundle2
1346
1346
1347 For now, the only supported data are changegroup."""
1347 For now, the only supported data are changegroup."""
1348 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1348 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1349
1349
1350 # At the moment we don't do stream clones over bundle2. If that is
1350 # At the moment we don't do stream clones over bundle2. If that is
1351 # implemented then here's where the check for that will go.
1351 # implemented then here's where the check for that will go.
1352 streaming = False
1352 streaming = False
1353
1353
1354 # pulling changegroup
1354 # pulling changegroup
1355 pullop.stepsdone.add('changegroup')
1355 pullop.stepsdone.add('changegroup')
1356
1356
1357 kwargs['common'] = pullop.common
1357 kwargs['common'] = pullop.common
1358 kwargs['heads'] = pullop.heads or pullop.rheads
1358 kwargs['heads'] = pullop.heads or pullop.rheads
1359 kwargs['cg'] = pullop.fetch
1359 kwargs['cg'] = pullop.fetch
1360 if 'listkeys' in pullop.remotebundle2caps:
1360 if 'listkeys' in pullop.remotebundle2caps:
1361 kwargs['listkeys'] = ['phases']
1361 kwargs['listkeys'] = ['phases']
1362 if pullop.remotebookmarks is None:
1362 if pullop.remotebookmarks is None:
1363 # make sure to always includes bookmark data when migrating
1363 # make sure to always includes bookmark data when migrating
1364 # `hg incoming --bundle` to using this function.
1364 # `hg incoming --bundle` to using this function.
1365 kwargs['listkeys'].append('bookmarks')
1365 kwargs['listkeys'].append('bookmarks')
1366
1366
1367 # If this is a full pull / clone and the server supports the clone bundles
1367 # If this is a full pull / clone and the server supports the clone bundles
1368 # feature, tell the server whether we attempted a clone bundle. The
1368 # feature, tell the server whether we attempted a clone bundle. The
1369 # presence of this flag indicates the client supports clone bundles. This
1369 # presence of this flag indicates the client supports clone bundles. This
1370 # will enable the server to treat clients that support clone bundles
1370 # will enable the server to treat clients that support clone bundles
1371 # differently from those that don't.
1371 # differently from those that don't.
1372 if (pullop.remote.capable('clonebundles')
1372 if (pullop.remote.capable('clonebundles')
1373 and pullop.heads is None and list(pullop.common) == [nullid]):
1373 and pullop.heads is None and list(pullop.common) == [nullid]):
1374 kwargs['cbattempted'] = pullop.clonebundleattempted
1374 kwargs['cbattempted'] = pullop.clonebundleattempted
1375
1375
1376 if streaming:
1376 if streaming:
1377 pullop.repo.ui.status(_('streaming all changes\n'))
1377 pullop.repo.ui.status(_('streaming all changes\n'))
1378 elif not pullop.fetch:
1378 elif not pullop.fetch:
1379 pullop.repo.ui.status(_("no changes found\n"))
1379 pullop.repo.ui.status(_("no changes found\n"))
1380 pullop.cgresult = 0
1380 pullop.cgresult = 0
1381 else:
1381 else:
1382 if pullop.heads is None and list(pullop.common) == [nullid]:
1382 if pullop.heads is None and list(pullop.common) == [nullid]:
1383 pullop.repo.ui.status(_("requesting all changes\n"))
1383 pullop.repo.ui.status(_("requesting all changes\n"))
1384 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1384 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1385 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1385 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1386 if obsolete.commonversion(remoteversions) is not None:
1386 if obsolete.commonversion(remoteversions) is not None:
1387 kwargs['obsmarkers'] = True
1387 kwargs['obsmarkers'] = True
1388 pullop.stepsdone.add('obsmarkers')
1388 pullop.stepsdone.add('obsmarkers')
1389 _pullbundle2extraprepare(pullop, kwargs)
1389 _pullbundle2extraprepare(pullop, kwargs)
1390 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1390 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1391 try:
1391 try:
1392 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1392 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1393 except bundle2.AbortFromPart as exc:
1393 except bundle2.AbortFromPart as exc:
1394 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1394 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1395 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1395 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1396 except error.BundleValueError as exc:
1396 except error.BundleValueError as exc:
1397 raise error.Abort(_('missing support for %s') % exc)
1397 raise error.Abort(_('missing support for %s') % exc)
1398
1398
1399 if pullop.fetch:
1399 if pullop.fetch:
1400 pullop.cgresult = bundle2.combinechangegroupresults(op)
1400 pullop.cgresult = bundle2.combinechangegroupresults(op)
1401
1401
1402 # processing phases change
1402 # processing phases change
1403 for namespace, value in op.records['listkeys']:
1403 for namespace, value in op.records['listkeys']:
1404 if namespace == 'phases':
1404 if namespace == 'phases':
1405 _pullapplyphases(pullop, value)
1405 _pullapplyphases(pullop, value)
1406
1406
1407 # processing bookmark update
1407 # processing bookmark update
1408 for namespace, value in op.records['listkeys']:
1408 for namespace, value in op.records['listkeys']:
1409 if namespace == 'bookmarks':
1409 if namespace == 'bookmarks':
1410 pullop.remotebookmarks = value
1410 pullop.remotebookmarks = value
1411
1411
1412 # bookmark data were either already there or pulled in the bundle
1412 # bookmark data were either already there or pulled in the bundle
1413 if pullop.remotebookmarks is not None:
1413 if pullop.remotebookmarks is not None:
1414 _pullbookmarks(pullop)
1414 _pullbookmarks(pullop)
1415
1415
1416 def _pullbundle2extraprepare(pullop, kwargs):
1416 def _pullbundle2extraprepare(pullop, kwargs):
1417 """hook function so that extensions can extend the getbundle call"""
1417 """hook function so that extensions can extend the getbundle call"""
1418 pass
1418 pass
1419
1419
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        # a bundle2 part already transferred the changegroup
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        # nothing to transfer; report and record a neutral result
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        # full pull / clone: every changeset will be requested
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads
    # pick the most capable wire command the remote supports:
    # getbundle > changegroupsubset > changegroup (full pull only)
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1454
1454
1455 def _pullphase(pullop):
1455 def _pullphase(pullop):
1456 # Get remote phases data from remote
1456 # Get remote phases data from remote
1457 if 'phases' in pullop.stepsdone:
1457 if 'phases' in pullop.stepsdone:
1458 return
1458 return
1459 remotephases = pullop.remote.listkeys('phases')
1459 remotephases = pullop.remote.listkeys('phases')
1460 _pullapplyphases(pullop, remotephases)
1460 _pullapplyphases(pullop, remotephases)
1461
1461
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    # work on the unfiltered repo so phase computation sees every revision
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1496
1496
1497 def _pullbookmarks(pullop):
1497 def _pullbookmarks(pullop):
1498 """process the remote bookmark information to update the local one"""
1498 """process the remote bookmark information to update the local one"""
1499 if 'bookmarks' in pullop.stepsdone:
1499 if 'bookmarks' in pullop.stepsdone:
1500 return
1500 return
1501 pullop.stepsdone.add('bookmarks')
1501 pullop.stepsdone.add('bookmarks')
1502 repo = pullop.repo
1502 repo = pullop.repo
1503 remotebookmarks = pullop.remotebookmarks
1503 remotebookmarks = pullop.remotebookmarks
1504 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1504 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1505 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1505 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1506 pullop.remote.url(),
1506 pullop.remote.url(),
1507 pullop.gettransaction,
1507 pullop.gettransaction,
1508 explicit=pullop.explicitbookmarks)
1508 explicit=pullop.explicitbookmarks)
1509
1509
1510 def _pullobsolete(pullop):
1510 def _pullobsolete(pullop):
1511 """utility function to pull obsolete markers from a remote
1511 """utility function to pull obsolete markers from a remote
1512
1512
1513 The `gettransaction` is function that return the pull transaction, creating
1513 The `gettransaction` is function that return the pull transaction, creating
1514 one if necessary. We return the transaction to inform the calling code that
1514 one if necessary. We return the transaction to inform the calling code that
1515 a new transaction have been created (when applicable).
1515 a new transaction have been created (when applicable).
1516
1516
1517 Exists mostly to allow overriding for experimentation purpose"""
1517 Exists mostly to allow overriding for experimentation purpose"""
1518 if 'obsmarkers' in pullop.stepsdone:
1518 if 'obsmarkers' in pullop.stepsdone:
1519 return
1519 return
1520 pullop.stepsdone.add('obsmarkers')
1520 pullop.stepsdone.add('obsmarkers')
1521 tr = None
1521 tr = None
1522 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1522 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1523 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1523 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1524 remoteobs = pullop.remote.listkeys('obsolete')
1524 remoteobs = pullop.remote.listkeys('obsolete')
1525 if 'dump0' in remoteobs:
1525 if 'dump0' in remoteobs:
1526 tr = pullop.gettransaction()
1526 tr = pullop.gettransaction()
1527 markers = []
1527 markers = []
1528 for key in sorted(remoteobs, reverse=True):
1528 for key in sorted(remoteobs, reverse=True):
1529 if key.startswith('dump'):
1529 if key.startswith('dump'):
1530 data = util.b85decode(remoteobs[key])
1530 data = util.b85decode(remoteobs[key])
1531 version, newmarks = obsolete._readmarkers(data)
1531 version, newmarks = obsolete._readmarkers(data)
1532 markers += newmarks
1532 markers += newmarks
1533 if markers:
1533 if markers:
1534 pullop.repo.obsstore.add(tr, markers)
1534 pullop.repo.obsstore.add(tr, markers)
1535 pullop.repo.invalidatevolatilesets()
1535 pullop.repo.invalidatevolatilesets()
1536 return tr
1536 return tr
1537
1537
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    # advertise bundle2 itself plus the (url-quoted) encoded capability blob
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1544
1544
# List of names of steps to perform for a bundle2 for getbundle, order matters.
# Populated via the getbundle2partsgenerator decorator below.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}
1552
1552
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is recorded in the step -> function mapping and
    its step name added to the list of steps: appended by default, or
    inserted at position ``idx`` when given. Beware that decorated functions
    will be added in order (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""
    def register(func):
        # a step name may only be registered once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is not None:
            getbundle2partsorder.insert(idx, stepname)
        else:
            getbundle2partsorder.append(stepname)
        return func
    return register
1571
1571
def bundle2requested(bundlecaps):
    """tell whether the given capability set asks for a bundle2 response"""
    if bundlecaps is None:
        return False
    # any 'HG2*' capability (HG20, HG2X, ...) means bundle2 was requested
    return any(cap.startswith('HG2') for cap in bundlecaps)
1576
1576
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        # bundle10 can only carry a changegroup; any other request argument
        # is unsupported and rejected loudly
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        bundler = changegroup.getbundler('01', repo, bundlecaps)
        return changegroup.getsubsetraw(repo, outgoing, bundler, source)

    # bundle20 case
    # decode the client's advertised bundle2 capability blob, if any
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # run every registered part generator, in registration order
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    return bundler.getchunks()
1617
1617
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cg = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions: # 3.1 and 3.2 ship with an empty value
            # negotiate the highest changegroup version both sides support
            cgversions = [v for v in cgversions
                          if v in changegroup.supportedoutgoingversions(repo)]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
                                                bundlecaps=bundlecaps,
                                                version=version)

    # cg may be None (changegroup not requested) or empty (nothing outgoing)
    if cg:
        part = bundler.newpart('changegroup', data=cg)
        if cgversions:
            part.addparam('version', version)
        # advisory hint so the receiver can show progress
        part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
        if 'treemanifest' in repo.requirements:
            part.addparam('treemanifest', '1')
1645
1645
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    # one 'listkeys' part per requested pushkey namespace
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1656
1656
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # restrict to markers relevant to the changesets being transferred
    subset = [ctx.node() for ctx in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1668
1668
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Don't send unless changesets are being exchanged...
    if not kwargs.get('cg', True):
        return
    # ...and the client supports the part.
    if 'hgtagsfnodes' not in b2caps:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1688
1688
def _getbookmarks(repo, **kwargs):
    """Returns bookmark to node mapping.

    This function is primarily used to generate `bookmarks` bundle2 part.
    It is a separate function in order to make it easy to wrap it
    in extensions. Passing `kwargs` to the function makes it easy to
    add new parameters in extensions.
    """
    return {book: node for book, node in bookmod.listbinbookmarks(repo)}
1699
1699
1700 def check_heads(repo, their_heads, context):
1700 def check_heads(repo, their_heads, context):
1701 """check if the heads of a repo have been modified
1701 """check if the heads of a repo have been modified
1702
1702
1703 Used by peer for unbundling.
1703 Used by peer for unbundling.
1704 """
1704 """
1705 heads = repo.heads()
1705 heads = repo.heads()
1706 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1706 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
1707 if not (their_heads == ['force'] or their_heads == heads or
1707 if not (their_heads == ['force'] or their_heads == heads or
1708 their_heads == ['hashed', heads_hash]):
1708 their_heads == ['hashed', heads_hash]):
1709 # someone else committed/pushed/unbundled while we
1709 # someone else committed/pushed/unbundled while we
1710 # were transferring data
1710 # were transferring data
1711 raise error.PushRaced('repository changed while %s - '
1711 raise error.PushRaced('repository changed while %s - '
1712 'please try again' % context)
1712 'please try again' % context)
1713
1713
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call wil be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                # lazily take wlock, lock and open the transaction only once
                # something actually needs them; state is shared via lockandtr
                def gettransaction():
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    # capture the reply (and any buffered output) even when
                    # processing failed, so the client still gets feedback
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # flag the exception so upper layers know it happened during
                # bundle2 processing, and salvage any partial reply output
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        # release in reverse acquisition order: tr, lock, wlock
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
1786
1786
def _maybeapplyclonebundle(pullop):
    """Attempt to seed an empty local repo from a remote clone bundle.

    Silently returns when clone bundles are disabled, the local repo is
    non-empty, specific heads were requested, the remote lacks the
    capability, or no (compatible) bundles are advertised. If a bundle
    was selected but failed to apply, aborts unless
    ``ui.clonebundlefallback`` is set.
    """
    repo = pullop.repo
    remote = pullop.remote

    # Bail out early unless this is an unrestricted clone into an empty
    # repo and both sides support clone bundles. Short-circuiting keeps
    # the same evaluation order as a chain of individual guards.
    if (not repo.ui.configbool('ui', 'clonebundles')
            or len(repo)  # local repo must be empty
            or pullop.heads  # full pull only, no specific heads
            or not remote.capable('clonebundles')):
        return

    res = remote._call('clonebundles')

    # Merely issuing the wire protocol command is enough to count as an
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(repo, entries)
    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        # Bundle failed. We abort by default to avoid the thundering
        # herd of clients flooding a server that was expecting expensive
        # clone load to be offloaded.
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
1850
1850
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            # Blank lines carry no entry.
            continue

        # First field is the URL; the rest are KEY=VALUE attributes.
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(
                        repo, value, externalnames=True)
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version
                except (error.InvalidBundleSpecification,
                        error.UnsupportedBundleSpecification):
                    # Unparseable specs are kept verbatim; only the
                    # derived COMPRESSION/VERSION keys are skipped.
                    pass

        entries.append(attrs)

    return entries
1886
1886
def filterclonebundleentries(repo, entries):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    usable = []
    for entry in entries:
        spec = entry.get('BUNDLESPEC')
        if spec:
            # Strict parsing rejects specs this client cannot handle.
            try:
                parsebundlespec(repo, spec, strict=True)
            except error.InvalidBundleSpecification as e:
                repo.ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (entry['URL'], str(e)))
                continue

        # Entries demanding SNI are useless if our TLS stack lacks it.
        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        usable.append(entry)

    return usable
1919
1919
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        # ``value`` is the manifest entry dict; ``prefers`` is an ordered
        # list of (key, preferred-value) pairs, most important first.
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        # Walk preferences most-important first; the first attribute that
        # discriminates the two entries decides the order.
        for prefkey, prefvalue in self.prefers:
            avalue = self.value.get(prefkey)
            bvalue = other.value.get(prefkey)

            # An entry matching the preference exactly beats one that
            # lacks the attribute entirely.
            if avalue is not None and bvalue is None and avalue == prefvalue:
                return -1
            if bvalue is not None and avalue is None and bvalue == prefvalue:
                return 1

            # Attribute missing on either side (without the exact-match
            # case above), or equal values: defer to the next preference.
            if avalue is None or bvalue is None or avalue == bvalue:
                continue

            # Both present and different: the exact match comes first.
            if avalue == prefvalue:
                return -1
            if bvalue == prefvalue:
                return 1

            # Neither side matched the preferred value; try the next one.
            continue

        # No preference discriminated the entries; preserve input order.
        return 0

    # Rich comparison operators all funnel through _cmp so the ordering
    # stays internally consistent.
    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0
1983
1983
def sortclonebundleentries(ui, entries):
    """Order manifest entries by the ``ui.clonebundleprefers`` config.

    Returns a new list. With no preferences configured the server's
    order is kept; otherwise entries are stably sorted so that earlier
    preferences dominate later ones.
    """
    rawprefers = ui.configlist('ui', 'clonebundleprefers')
    if not rawprefers:
        # Nothing to sort by; hand back a copy in server order.
        return list(entries)

    # Each preference is "KEY=VALUE"; split once into a pair.
    prefers = [p.split('=', 1) for p in rawprefers]

    wrapped = [clonebundleentry(e, prefers) for e in entries]
    wrapped.sort()
    return [w.value for w in wrapped]
1993
1993
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL.

    Returns True when the bundle was applied. On HTTP or URL errors a
    warning is emitted and False is returned so the caller can fall back
    to a regular clone. Everything runs under the repo lock inside a
    single transaction.
    """
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            fobj = urlmod.open(ui, url)
            payload = readbundle(ui, fobj, 'stream')

            # streamcloneapplier instances carry their own apply logic;
            # everything else goes through bundle2.
            if isinstance(payload, streamclone.streamcloneapplier):
                payload.apply(repo)
            else:
                bundle2.applybundle(repo, payload, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') % e.reason)

    return False
General Comments 0
You need to be logged in to leave comments. Login now