##// END OF EJS Templates
exchange: remove need for "locked" variable...
Martin von Zweigbergk -
r33789:5904511f default
parent child Browse files
Show More
@@ -1,2011 +1,2009
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from . import (
18 from . import (
19 bookmarks as bookmod,
19 bookmarks as bookmod,
20 bundle2,
20 bundle2,
21 changegroup,
21 changegroup,
22 discovery,
22 discovery,
23 error,
23 error,
24 lock as lockmod,
24 lock as lockmod,
25 obsolete,
25 obsolete,
26 phases,
26 phases,
27 pushkey,
27 pushkey,
28 pycompat,
28 pycompat,
29 scmutil,
29 scmutil,
30 sslutil,
30 sslutil,
31 streamclone,
31 streamclone,
32 url as urlmod,
32 url as urlmod,
33 util,
33 util,
34 )
34 )
35
35
36 urlerr = util.urlerr
36 urlerr = util.urlerr
37 urlreq = util.urlreq
37 urlreq = util.urlreq
38
38
39 # Maps bundle version human names to changegroup versions.
39 # Maps bundle version human names to changegroup versions.
40 _bundlespeccgversions = {'v1': '01',
40 _bundlespeccgversions = {'v1': '01',
41 'v2': '02',
41 'v2': '02',
42 'packed1': 's1',
42 'packed1': 's1',
43 'bundle2': '02', #legacy
43 'bundle2': '02', #legacy
44 }
44 }
45
45
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
46 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
47 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
48
48
def parsebundlespec(repo, spec, strict=True, externalnames=False):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    If ``externalnames`` is False (the default), the human-centric names will
    be converted to their internal representation.

    Returns a 3-tuple of (compression, version, parameters). Compression will
    be ``None`` if not in strict mode and a compression isn't defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>;k0=v0;k1=v1" into (version, {k: v}); values are
        # URI-decoded. A string without ";" has no parameters.
        if ';' not in s:
            return s, {}

        params = {}
        version, paramstr = s.split(';', 1)

        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            params[key] = value

        return version, params

    if strict and '-' not in spec:
        # Strict mode insists on an explicit "<compression>-" prefix.
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            if spec == 'packed1':
                compression = 'none'
            else:
                compression = 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    if not externalnames:
        # Translate human-centric names into the internal wire identifiers.
        engine = util.compengines.forbundlename(compression)
        compression = engine.bundletype()[1]
        version = _bundlespeccgversions[version]
    return compression, version, params
170
170
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler object for the bundle readable from ``fh``.

    The first four bytes are consumed to sniff the bundle type. A headerless
    stream starting with ``\\0`` is fixed up and treated as an uncompressed
    HG10 changegroup. ``fname`` is only used for error messages (joined with
    ``vfs`` when one is given); it defaults to "stream" when empty.
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            # Raw changegroup with no magic: re-prepend the sniffed bytes and
            # pretend it was an uncompressed HG10 bundle.
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[0:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            # HG10 carries a two-byte compression identifier after the magic.
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
198
198
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map an internal compression identifier to its bundlespec name, or
        # None when the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            # Internal marker for a BZ stream whose header was consumed.
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        params = 'requirements=%s' % ','.join(sorted(requirements))
        return 'none-packed1;%s' % urlreq.quote(params)
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
251
251
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    cl = repo.changelog
    if common:
        # Drop common nodes the local changelog does not actually know about.
        hasnode = cl.hasnode
        common = [n for n in common if hasnode(n)]
    else:
        # Nothing in common means everything is outgoing from the null rev.
        common = [nullid]
    if not heads:
        heads = cl.heads()
    return discovery.outgoing(repo, common, heads)
270
270
271 def _forcebundle1(op):
271 def _forcebundle1(op):
272 """return true if a pull/push must use bundle1
272 """return true if a pull/push must use bundle1
273
273
274 This function is used to allow testing of the older bundle version"""
274 This function is used to allow testing of the older bundle version"""
275 ui = op.repo.ui
275 ui = op.repo.ui
276 forcebundle1 = False
276 forcebundle1 = False
277 # The goal is this config is to allow developer to choose the bundle
277 # The goal is this config is to allow developer to choose the bundle
278 # version used during exchanged. This is especially handy during test.
278 # version used during exchanged. This is especially handy during test.
279 # Value is a list of bundle version to be picked from, highest version
279 # Value is a list of bundle version to be picked from, highest version
280 # should be used.
280 # should be used.
281 #
281 #
282 # developer config: devel.legacy.exchange
282 # developer config: devel.legacy.exchange
283 exchange = ui.configlist('devel', 'legacy.exchange')
283 exchange = ui.configlist('devel', 'legacy.exchange')
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
284 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
285 return forcebundle1 or not op.remote.capable('bundle2')
285 return forcebundle1 or not op.remote.capable('bundle2')
286
286
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=()):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nm = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nm[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads

# mapping of message used when pushing bookmark
bookmsgmap = {'update': (_("updating bookmark %s\n"),
                         _('updating bookmark %s failed!\n')),
              'export': (_("exporting bookmark %s\n"),
                         _('exporting bookmark %s failed!\n')),
              'delete': (_("deleting remote bookmark %s\n"),
                         _('deleting remote bookmark %s failed!\n')),
              }
409
409
410
410
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **opargs)
    if pushop.remote.local():
        # Local peer: refuse to push into a repo that lacks support for a
        # feature the source requires.
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        # Creating the transaction manager here (instead of tracking a
        # separate "locked" flag) ties its existence to a successful lock.
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except IOError as err:
        if err.errno != errno.EACCES:
            raise
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    try:
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

        if pushop.trmanager:
            pushop.trmanager.close()
    finally:
        # Release in reverse acquisition order; each may be None when the
        # corresponding resource was never obtained.
        if pushop.trmanager:
            pushop.trmanager.release()
        if lock is not None:
            lock.release()
        if wlock is not None:
            wlock.release()

    return pushop
485
483
# list of steps to perform discovery before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
493
491
def pushdiscovery(stepname):
    """decorator registering a pre-push discovery step

    The decorated function is recorded in the step -> function mapping and
    the step name is appended to the ordered step list, so registration
    order matters.

    Only use this decorator for brand new steps; to wrap an existing step
    from an extension, mutate the pushdiscovery dictionary directly."""
    def register(fn):
        # a step name must be unique; double registration is a bug
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = fn
        pushdiscoveryorder.append(stepname)
        return fn
    return register
509
507
def _pushdiscovery(pushop):
    """Run every registered discovery step, in registration order"""
    for stepname in pushdiscoveryorder:
        pushdiscoverymapping[stepname](pushop)
515
513
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    # common/incoming data is reused by the outgoing computation below
    commoninc = discovery.findcommonincoming(pushop.repo, pushop.remote,
                                             force=pushop.force)
    common, inc, remoteheads = commoninc
    outgoing = discovery.findcommonoutgoing(pushop.repo, pushop.remote,
                                            onlyheads=pushop.revs,
                                            commoninc=commoninc,
                                            force=pushop.force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
528
526
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = pushop.remote.listkeys('phases')
    publishing = remotephases.get('publishing', False)
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and publishing):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    # Only the remote draft roots are needed below; the public-head list
    # returned alongside them was previously unpacked into an unused
    # variable.
    droots = phases.analyzeremotephases(pushop.repo,
                                        pushop.fallbackheads,
                                        remotephases)[1]
    extracond = ''
    if not publishing:
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                       outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
577
575
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    # collect obsolescence markers relevant to the outgoing heads, but only
    # when exchange is enabled, we have markers, and the remote understands
    # the 'obsolete' namespace
    repo = pushop.repo
    if (obsolete.isenabled(repo, obsolete.exchangeopt)
        and repo.obsstore
        and 'obsolete' in pushop.remote.listkeys('namespaces')):
        # very naive computation, that can be quite expensive on big repo.
        # However: evolution is currently slow on them anyway.
        nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
        pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
588
586
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and fill ``pushop.outbookmarks``

    Each entry appended to ``pushop.outbookmarks`` is a
    ``(name, old-remote-hex, new-hex)`` triple; sets ``pushop.bkresult`` to 2
    when an explicitly requested bookmark exists on neither side.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        revnums = map(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)
    remotebookmark = remote.listkeys('bookmarks')

    # bookmarks explicitly requested on the command line (names expanded)
    explicit = set(repo._bookmarks.expandname(bookmark)
                   for bookmark in pushop.bookmarks)

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # a side may be missing the bookmark, encoded as None
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        for b, scid, dcid in bookmarks:
            yield b, safehex(scid), safehex(dcid)

    comp = [hexifycompbookmarks(marks) for marks in comp]
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    for b, scid, dcid in advsrc:
        explicit.discard(b)
        if not ancestors or repo[scid].rev() in ancestors:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        explicit.discard(b)
        pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        explicit.discard(b)
        pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        explicit.discard(b)
        # treat as "deleted locally"
        pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        explicit.discard(b)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        ui.warn(_('bookmark %s does not exist on the local '
                  'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
653
651
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets; return False when there is no push

    Aborts when (without --force) the outgoing set contains obsolete or
    unstable changesets.
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # no obsstore means no obsolete changesets at all, so skip the scan
        if unfi.obsstore:
            # messages kept in short variables for the 80 char limit
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If any changeset in the missing set were obsolete or unstable,
            # at least one missing head would be too, so inspecting only the
            # heads is sufficient.
            for headnode in outgoing.missingheads:
                ctx = unfi[headnode]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
688
686
# Ordered list of part-generator step names for an outgoing bundle2;
# order matters.
b2partsgenorder = []

# Maps a part-generator step name to the function implementing it.
#
# Kept as a mutable module-level mapping so extensions can wrap or replace
# individual steps.
b2partsgenmapping = {}
696
694
def b2partsgenerator(stepname, idx=None):
    """decorator registering a bundle2 part generator

    The decorated function is recorded in the step -> function mapping and
    the step name is appended to the ordered step list (or inserted at
    ``idx`` when given), so registration order matters.

    Only use this decorator for brand new steps; to wrap an existing step
    from an extension, mutate the b2partsgenmapping dictionary directly."""
    def register(fn):
        # a step name must be unique; double registration is a bug
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = fn
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return fn
    return register
715
713
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # 'force' skips the push-race check entirely, and an empty push has
    # nothing to check
    if pushop.force or not pushop.outgoing.missingheads:
        return
    allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
    emptyremote = pushop.pushbranchmap is None
    if not allowunrelated or emptyremote:
        # legacy check: the full remote head set must be unchanged
        bundler.newpart('check:heads', data=iter(pushop.remoteheads))
    else:
        # finer-grained check: only heads we actually touch must be
        # unchanged on the remote
        affected = set()
        for branch, heads in pushop.pushbranchmap.iteritems():
            remoteheads, newheads, unsyncedheads, discardedheads = heads
            if remoteheads is not None:
                remote = set(remoteheads)
                affected |= set(discardedheads) & remote
                affected |= remote - set(newheads)
        if affected:
            data = iter(sorted(affected))
            bundler.newpart('check:updated-heads', data=data)
739
737
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            # user-facing capability mismatch: abort cleanly rather than
            # raising ValueError (which would print a traceback)
            raise error.Abort(_('no common changegroup version'))
        version = max(cgversions)
    cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push',
                                            pushop.outgoing,
                                            version=version)
    cgpart = bundler.newpart('changegroup', data=cg)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
780
778
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2

    Emits one 'pushkey' part per head that must move from draft to public
    on the remote; returns a reply handler that reports failures.
    """
    if 'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    # use 'not in' for readability and consistency with _pushb2bookmarks
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('phases')
    # (part id, node) pairs, used to map server replies back to heads
    part2node = []

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc(str(phases.draft)))
        part.addparam('new', enc(str(phases.public)))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
821
819
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    # ship obsolescence markers in the bundle when both sides share a
    # common obsmarkers format version
    if 'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
833
831
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'pushkey' not in b2caps:
        return
    pushop.stepsdone.add('bookmarks')
    # (part id, bookmark name, action) triples, used to map server replies
    # back to the bookmark they concern
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        # empty 'old' means the bookmark is new on the remote; empty 'new'
        # means it is being deleted there
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        else:
            action = 'update'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply
885
883
@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    shellvars = getattr(pushop.repo, '_shellvars', ())
    if shellvars:
        part = bundler.newpart('pushvars')

        for key, value in shellvars.iteritems():
            part.addparam(key, value, mandatory=False)
894
892
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # advertise our reply capabilities so the server can send a response
    # bundle back
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    for stepname in b2partsgenorder:
        handler = b2partsgenmapping[stepname](pushop, bundler)
        if callable(handler):
            replyhandlers.append(handler)
    # only the replycaps part means there is nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            reply = pushop.remote.unbundle(
                stream, ['force'], pushop.remote.url())
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = pushop.trmanager.transaction if pushback else None
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        # a registered callback turns the pushkey failure into a nicer abort
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
943
941
944 def _pushchangeset(pushop):
942 def _pushchangeset(pushop):
945 """Make the actual push of changeset bundle to remote repo"""
943 """Make the actual push of changeset bundle to remote repo"""
946 if 'changesets' in pushop.stepsdone:
944 if 'changesets' in pushop.stepsdone:
947 return
945 return
948 pushop.stepsdone.add('changesets')
946 pushop.stepsdone.add('changesets')
949 if not _pushcheckoutgoing(pushop):
947 if not _pushcheckoutgoing(pushop):
950 return
948 return
951
949
952 # Should have verified this in push().
950 # Should have verified this in push().
953 assert pushop.remote.capable('unbundle')
951 assert pushop.remote.capable('unbundle')
954
952
955 pushop.repo.prepushoutgoinghooks(pushop)
953 pushop.repo.prepushoutgoinghooks(pushop)
956 outgoing = pushop.outgoing
954 outgoing = pushop.outgoing
957 # TODO: get bundlecaps from remote
955 # TODO: get bundlecaps from remote
958 bundlecaps = None
956 bundlecaps = None
959 # create a changegroup from local
957 # create a changegroup from local
960 if pushop.revs is None and not (outgoing.excluded
958 if pushop.revs is None and not (outgoing.excluded
961 or pushop.repo.changelog.filteredrevs):
959 or pushop.repo.changelog.filteredrevs):
962 # push everything,
960 # push everything,
963 # use the fast path, no race possible on push
961 # use the fast path, no race possible on push
964 bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
962 bundler = changegroup.cg1packer(pushop.repo, bundlecaps)
965 cg = changegroup.getsubset(pushop.repo,
963 cg = changegroup.getsubset(pushop.repo,
966 outgoing,
964 outgoing,
967 bundler,
965 bundler,
968 'push',
966 'push',
969 fastpath=True)
967 fastpath=True)
970 else:
968 else:
971 cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
969 cg = changegroup.getchangegroup(pushop.repo, 'push', outgoing,
972 bundlecaps=bundlecaps)
970 bundlecaps=bundlecaps)
973
971
974 # apply changegroup to remote
972 # apply changegroup to remote
975 # local repo finds heads on server, finds out what
973 # local repo finds heads on server, finds out what
976 # revs it must push. once revs transferred, if server
974 # revs it must push. once revs transferred, if server
977 # finds it has different heads (someone else won
975 # finds it has different heads (someone else won
978 # commit/push race), server aborts.
976 # commit/push race), server aborts.
979 if pushop.force:
977 if pushop.force:
980 remoteheads = ['force']
978 remoteheads = ['force']
981 else:
979 else:
982 remoteheads = pushop.remoteheads
980 remoteheads = pushop.remoteheads
983 # ssh: return remote's addchangegroup()
981 # ssh: return remote's addchangegroup()
984 # http: return remote's addchangegroup() or 0 for error
982 # http: return remote's addchangegroup() or 0 for error
985 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
983 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
986 pushop.repo.url())
984 pushop.repo.url())
987
985
988 def _pushsyncphase(pushop):
986 def _pushsyncphase(pushop):
989 """synchronise phase information locally and remotely"""
987 """synchronise phase information locally and remotely"""
990 cheads = pushop.commonheads
988 cheads = pushop.commonheads
991 # even when we don't push, exchanging phase data is useful
989 # even when we don't push, exchanging phase data is useful
992 remotephases = pushop.remote.listkeys('phases')
990 remotephases = pushop.remote.listkeys('phases')
993 if (pushop.ui.configbool('ui', '_usedassubrepo')
991 if (pushop.ui.configbool('ui', '_usedassubrepo')
994 and remotephases # server supports phases
992 and remotephases # server supports phases
995 and pushop.cgresult is None # nothing was pushed
993 and pushop.cgresult is None # nothing was pushed
996 and remotephases.get('publishing', False)):
994 and remotephases.get('publishing', False)):
997 # When:
995 # When:
998 # - this is a subrepo push
996 # - this is a subrepo push
999 # - and remote support phase
997 # - and remote support phase
1000 # - and no changeset was pushed
998 # - and no changeset was pushed
1001 # - and remote is publishing
999 # - and remote is publishing
1002 # We may be in issue 3871 case!
1000 # We may be in issue 3871 case!
1003 # We drop the possible phase synchronisation done by
1001 # We drop the possible phase synchronisation done by
1004 # courtesy to publish changesets possibly locally draft
1002 # courtesy to publish changesets possibly locally draft
1005 # on the remote.
1003 # on the remote.
1006 remotephases = {'publishing': 'True'}
1004 remotephases = {'publishing': 'True'}
1007 if not remotephases: # old server or public only reply from non-publishing
1005 if not remotephases: # old server or public only reply from non-publishing
1008 _localphasemove(pushop, cheads)
1006 _localphasemove(pushop, cheads)
1009 # don't push any phase data as there is nothing to push
1007 # don't push any phase data as there is nothing to push
1010 else:
1008 else:
1011 ana = phases.analyzeremotephases(pushop.repo, cheads,
1009 ana = phases.analyzeremotephases(pushop.repo, cheads,
1012 remotephases)
1010 remotephases)
1013 pheads, droots = ana
1011 pheads, droots = ana
1014 ### Apply remote phase on local
1012 ### Apply remote phase on local
1015 if remotephases.get('publishing', False):
1013 if remotephases.get('publishing', False):
1016 _localphasemove(pushop, cheads)
1014 _localphasemove(pushop, cheads)
1017 else: # publish = False
1015 else: # publish = False
1018 _localphasemove(pushop, pheads)
1016 _localphasemove(pushop, pheads)
1019 _localphasemove(pushop, cheads, phases.draft)
1017 _localphasemove(pushop, cheads, phases.draft)
1020 ### Apply local phase on remote
1018 ### Apply local phase on remote
1021
1019
1022 if pushop.cgresult:
1020 if pushop.cgresult:
1023 if 'phases' in pushop.stepsdone:
1021 if 'phases' in pushop.stepsdone:
1024 # phases already pushed though bundle2
1022 # phases already pushed though bundle2
1025 return
1023 return
1026 outdated = pushop.outdatedphases
1024 outdated = pushop.outdatedphases
1027 else:
1025 else:
1028 outdated = pushop.fallbackoutdatedphases
1026 outdated = pushop.fallbackoutdatedphases
1029
1027
1030 pushop.stepsdone.add('phases')
1028 pushop.stepsdone.add('phases')
1031
1029
1032 # filter heads already turned public by the push
1030 # filter heads already turned public by the push
1033 outdated = [c for c in outdated if c.node() not in pheads]
1031 outdated = [c for c in outdated if c.node() not in pheads]
1034 # fallback to independent pushkey command
1032 # fallback to independent pushkey command
1035 for newremotehead in outdated:
1033 for newremotehead in outdated:
1036 r = pushop.remote.pushkey('phases',
1034 r = pushop.remote.pushkey('phases',
1037 newremotehead.hex(),
1035 newremotehead.hex(),
1038 str(phases.draft),
1036 str(phases.draft),
1039 str(phases.public))
1037 str(phases.public))
1040 if not r:
1038 if not r:
1041 pushop.ui.warn(_('updating %s to public failed!\n')
1039 pushop.ui.warn(_('updating %s to public failed!\n')
1042 % newremotehead)
1040 % newremotehead)
1043
1041
1044 def _localphasemove(pushop, nodes, phase=phases.public):
1042 def _localphasemove(pushop, nodes, phase=phases.public):
1045 """move <nodes> to <phase> in the local source repo"""
1043 """move <nodes> to <phase> in the local source repo"""
1046 if pushop.trmanager:
1044 if pushop.trmanager:
1047 phases.advanceboundary(pushop.repo,
1045 phases.advanceboundary(pushop.repo,
1048 pushop.trmanager.transaction(),
1046 pushop.trmanager.transaction(),
1049 phase,
1047 phase,
1050 nodes)
1048 nodes)
1051 else:
1049 else:
1052 # repo is not locked, do not change any phases!
1050 # repo is not locked, do not change any phases!
1053 # Informs the user that phases should have been moved when
1051 # Informs the user that phases should have been moved when
1054 # applicable.
1052 # applicable.
1055 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1053 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1056 phasestr = phases.phasenames[phase]
1054 phasestr = phases.phasenames[phase]
1057 if actualmoves:
1055 if actualmoves:
1058 pushop.ui.status(_('cannot lock source repo, skipping '
1056 pushop.ui.status(_('cannot lock source repo, skipping '
1059 'local %s phase update\n') % phasestr)
1057 'local %s phase update\n') % phasestr)
1060
1058
1061 def _pushobsolete(pushop):
1059 def _pushobsolete(pushop):
1062 """utility function to push obsolete markers to a remote"""
1060 """utility function to push obsolete markers to a remote"""
1063 if 'obsmarkers' in pushop.stepsdone:
1061 if 'obsmarkers' in pushop.stepsdone:
1064 return
1062 return
1065 repo = pushop.repo
1063 repo = pushop.repo
1066 remote = pushop.remote
1064 remote = pushop.remote
1067 pushop.stepsdone.add('obsmarkers')
1065 pushop.stepsdone.add('obsmarkers')
1068 if pushop.outobsmarkers:
1066 if pushop.outobsmarkers:
1069 pushop.ui.debug('try to push obsolete markers to remote\n')
1067 pushop.ui.debug('try to push obsolete markers to remote\n')
1070 rslts = []
1068 rslts = []
1071 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1069 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1072 for key in sorted(remotedata, reverse=True):
1070 for key in sorted(remotedata, reverse=True):
1073 # reverse sort to ensure we end with dump0
1071 # reverse sort to ensure we end with dump0
1074 data = remotedata[key]
1072 data = remotedata[key]
1075 rslts.append(remote.pushkey('obsolete', key, '', data))
1073 rslts.append(remote.pushkey('obsolete', key, '', data))
1076 if [r for r in rslts if not r]:
1074 if [r for r in rslts if not r]:
1077 msg = _('failed to push some obsolete markers!\n')
1075 msg = _('failed to push some obsolete markers!\n')
1078 repo.ui.warn(msg)
1076 repo.ui.warn(msg)
1079
1077
1080 def _pushbookmark(pushop):
1078 def _pushbookmark(pushop):
1081 """Update bookmark position on remote"""
1079 """Update bookmark position on remote"""
1082 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1080 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1083 return
1081 return
1084 pushop.stepsdone.add('bookmarks')
1082 pushop.stepsdone.add('bookmarks')
1085 ui = pushop.ui
1083 ui = pushop.ui
1086 remote = pushop.remote
1084 remote = pushop.remote
1087
1085
1088 for b, old, new in pushop.outbookmarks:
1086 for b, old, new in pushop.outbookmarks:
1089 action = 'update'
1087 action = 'update'
1090 if not old:
1088 if not old:
1091 action = 'export'
1089 action = 'export'
1092 elif not new:
1090 elif not new:
1093 action = 'delete'
1091 action = 'delete'
1094 if remote.pushkey('bookmarks', b, old, new):
1092 if remote.pushkey('bookmarks', b, old, new):
1095 ui.status(bookmsgmap[action][0] % b)
1093 ui.status(bookmsgmap[action][0] % b)
1096 else:
1094 else:
1097 ui.warn(bookmsgmap[action][1] % b)
1095 ui.warn(bookmsgmap[action][1] % b)
1098 # discovery can have set the value form invalid entry
1096 # discovery can have set the value form invalid entry
1099 if pushop.bkresult is not None:
1097 if pushop.bkresult is not None:
1100 pushop.bkresult = 1
1098 pushop.bkresult = 1
1101
1099
1102 class pulloperation(object):
1100 class pulloperation(object):
1103 """A object that represent a single pull operation
1101 """A object that represent a single pull operation
1104
1102
1105 It purpose is to carry pull related state and very common operation.
1103 It purpose is to carry pull related state and very common operation.
1106
1104
1107 A new should be created at the beginning of each pull and discarded
1105 A new should be created at the beginning of each pull and discarded
1108 afterward.
1106 afterward.
1109 """
1107 """
1110
1108
1111 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1109 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1112 remotebookmarks=None, streamclonerequested=None):
1110 remotebookmarks=None, streamclonerequested=None):
1113 # repo we pull into
1111 # repo we pull into
1114 self.repo = repo
1112 self.repo = repo
1115 # repo we pull from
1113 # repo we pull from
1116 self.remote = remote
1114 self.remote = remote
1117 # revision we try to pull (None is "all")
1115 # revision we try to pull (None is "all")
1118 self.heads = heads
1116 self.heads = heads
1119 # bookmark pulled explicitly
1117 # bookmark pulled explicitly
1120 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1118 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1121 for bookmark in bookmarks]
1119 for bookmark in bookmarks]
1122 # do we force pull?
1120 # do we force pull?
1123 self.force = force
1121 self.force = force
1124 # whether a streaming clone was requested
1122 # whether a streaming clone was requested
1125 self.streamclonerequested = streamclonerequested
1123 self.streamclonerequested = streamclonerequested
1126 # transaction manager
1124 # transaction manager
1127 self.trmanager = None
1125 self.trmanager = None
1128 # set of common changeset between local and remote before pull
1126 # set of common changeset between local and remote before pull
1129 self.common = None
1127 self.common = None
1130 # set of pulled head
1128 # set of pulled head
1131 self.rheads = None
1129 self.rheads = None
1132 # list of missing changeset to fetch remotely
1130 # list of missing changeset to fetch remotely
1133 self.fetch = None
1131 self.fetch = None
1134 # remote bookmarks data
1132 # remote bookmarks data
1135 self.remotebookmarks = remotebookmarks
1133 self.remotebookmarks = remotebookmarks
1136 # result of changegroup pulling (used as return code by pull)
1134 # result of changegroup pulling (used as return code by pull)
1137 self.cgresult = None
1135 self.cgresult = None
1138 # list of step already done
1136 # list of step already done
1139 self.stepsdone = set()
1137 self.stepsdone = set()
1140 # Whether we attempted a clone from pre-generated bundles.
1138 # Whether we attempted a clone from pre-generated bundles.
1141 self.clonebundleattempted = False
1139 self.clonebundleattempted = False
1142
1140
1143 @util.propertycache
1141 @util.propertycache
1144 def pulledsubset(self):
1142 def pulledsubset(self):
1145 """heads of the set of changeset target by the pull"""
1143 """heads of the set of changeset target by the pull"""
1146 # compute target subset
1144 # compute target subset
1147 if self.heads is None:
1145 if self.heads is None:
1148 # We pulled every thing possible
1146 # We pulled every thing possible
1149 # sync on everything common
1147 # sync on everything common
1150 c = set(self.common)
1148 c = set(self.common)
1151 ret = list(self.common)
1149 ret = list(self.common)
1152 for n in self.rheads:
1150 for n in self.rheads:
1153 if n not in c:
1151 if n not in c:
1154 ret.append(n)
1152 ret.append(n)
1155 return ret
1153 return ret
1156 else:
1154 else:
1157 # We pulled a specific subset
1155 # We pulled a specific subset
1158 # sync on this subset
1156 # sync on this subset
1159 return self.heads
1157 return self.heads
1160
1158
1161 @util.propertycache
1159 @util.propertycache
1162 def canusebundle2(self):
1160 def canusebundle2(self):
1163 return not _forcebundle1(self)
1161 return not _forcebundle1(self)
1164
1162
1165 @util.propertycache
1163 @util.propertycache
1166 def remotebundle2caps(self):
1164 def remotebundle2caps(self):
1167 return bundle2.bundle2caps(self.remote)
1165 return bundle2.bundle2caps(self.remote)
1168
1166
1169 def gettransaction(self):
1167 def gettransaction(self):
1170 # deprecated; talk to trmanager directly
1168 # deprecated; talk to trmanager directly
1171 return self.trmanager.transaction()
1169 return self.trmanager.transaction()
1172
1170
1173 class transactionmanager(object):
1171 class transactionmanager(object):
1174 """An object to manage the life cycle of a transaction
1172 """An object to manage the life cycle of a transaction
1175
1173
1176 It creates the transaction on demand and calls the appropriate hooks when
1174 It creates the transaction on demand and calls the appropriate hooks when
1177 closing the transaction."""
1175 closing the transaction."""
1178 def __init__(self, repo, source, url):
1176 def __init__(self, repo, source, url):
1179 self.repo = repo
1177 self.repo = repo
1180 self.source = source
1178 self.source = source
1181 self.url = url
1179 self.url = url
1182 self._tr = None
1180 self._tr = None
1183
1181
1184 def transaction(self):
1182 def transaction(self):
1185 """Return an open transaction object, constructing if necessary"""
1183 """Return an open transaction object, constructing if necessary"""
1186 if not self._tr:
1184 if not self._tr:
1187 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1185 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1188 self._tr = self.repo.transaction(trname)
1186 self._tr = self.repo.transaction(trname)
1189 self._tr.hookargs['source'] = self.source
1187 self._tr.hookargs['source'] = self.source
1190 self._tr.hookargs['url'] = self.url
1188 self._tr.hookargs['url'] = self.url
1191 return self._tr
1189 return self._tr
1192
1190
1193 def close(self):
1191 def close(self):
1194 """close transaction if created"""
1192 """close transaction if created"""
1195 if self._tr is not None:
1193 if self._tr is not None:
1196 self._tr.close()
1194 self._tr.close()
1197
1195
1198 def release(self):
1196 def release(self):
1199 """release transaction if created"""
1197 """release transaction if created"""
1200 if self._tr is not None:
1198 if self._tr is not None:
1201 self._tr.release()
1199 self._tr.release()
1202
1200
1203 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1201 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1204 streamclonerequested=None):
1202 streamclonerequested=None):
1205 """Fetch repository data from a remote.
1203 """Fetch repository data from a remote.
1206
1204
1207 This is the main function used to retrieve data from a remote repository.
1205 This is the main function used to retrieve data from a remote repository.
1208
1206
1209 ``repo`` is the local repository to clone into.
1207 ``repo`` is the local repository to clone into.
1210 ``remote`` is a peer instance.
1208 ``remote`` is a peer instance.
1211 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1209 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1212 default) means to pull everything from the remote.
1210 default) means to pull everything from the remote.
1213 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1211 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1214 default, all remote bookmarks are pulled.
1212 default, all remote bookmarks are pulled.
1215 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1213 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1216 initialization.
1214 initialization.
1217 ``streamclonerequested`` is a boolean indicating whether a "streaming
1215 ``streamclonerequested`` is a boolean indicating whether a "streaming
1218 clone" is requested. A "streaming clone" is essentially a raw file copy
1216 clone" is requested. A "streaming clone" is essentially a raw file copy
1219 of revlogs from the server. This only works when the local repository is
1217 of revlogs from the server. This only works when the local repository is
1220 empty. The default value of ``None`` means to respect the server
1218 empty. The default value of ``None`` means to respect the server
1221 configuration for preferring stream clones.
1219 configuration for preferring stream clones.
1222
1220
1223 Returns the ``pulloperation`` created for this pull.
1221 Returns the ``pulloperation`` created for this pull.
1224 """
1222 """
1225 if opargs is None:
1223 if opargs is None:
1226 opargs = {}
1224 opargs = {}
1227 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1225 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1228 streamclonerequested=streamclonerequested, **opargs)
1226 streamclonerequested=streamclonerequested, **opargs)
1229
1227
1230 peerlocal = pullop.remote.local()
1228 peerlocal = pullop.remote.local()
1231 if peerlocal:
1229 if peerlocal:
1232 missing = set(peerlocal.requirements) - pullop.repo.supported
1230 missing = set(peerlocal.requirements) - pullop.repo.supported
1233 if missing:
1231 if missing:
1234 msg = _("required features are not"
1232 msg = _("required features are not"
1235 " supported in the destination:"
1233 " supported in the destination:"
1236 " %s") % (', '.join(sorted(missing)))
1234 " %s") % (', '.join(sorted(missing)))
1237 raise error.Abort(msg)
1235 raise error.Abort(msg)
1238
1236
1239 wlock = lock = None
1237 wlock = lock = None
1240 try:
1238 try:
1241 wlock = pullop.repo.wlock()
1239 wlock = pullop.repo.wlock()
1242 lock = pullop.repo.lock()
1240 lock = pullop.repo.lock()
1243 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1241 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1244 streamclone.maybeperformlegacystreamclone(pullop)
1242 streamclone.maybeperformlegacystreamclone(pullop)
1245 # This should ideally be in _pullbundle2(). However, it needs to run
1243 # This should ideally be in _pullbundle2(). However, it needs to run
1246 # before discovery to avoid extra work.
1244 # before discovery to avoid extra work.
1247 _maybeapplyclonebundle(pullop)
1245 _maybeapplyclonebundle(pullop)
1248 _pulldiscovery(pullop)
1246 _pulldiscovery(pullop)
1249 if pullop.canusebundle2:
1247 if pullop.canusebundle2:
1250 _pullbundle2(pullop)
1248 _pullbundle2(pullop)
1251 _pullchangeset(pullop)
1249 _pullchangeset(pullop)
1252 _pullphase(pullop)
1250 _pullphase(pullop)
1253 _pullbookmarks(pullop)
1251 _pullbookmarks(pullop)
1254 _pullobsolete(pullop)
1252 _pullobsolete(pullop)
1255 pullop.trmanager.close()
1253 pullop.trmanager.close()
1256 finally:
1254 finally:
1257 lockmod.release(pullop.trmanager, lock, wlock)
1255 lockmod.release(pullop.trmanager, lock, wlock)
1258
1256
1259 return pullop
1257 return pullop
1260
1258
1261 # list of steps to perform discovery before pull
1259 # list of steps to perform discovery before pull
1262 pulldiscoveryorder = []
1260 pulldiscoveryorder = []
1263
1261
1264 # Mapping between step name and function
1262 # Mapping between step name and function
1265 #
1263 #
1266 # This exists to help extensions wrap steps if necessary
1264 # This exists to help extensions wrap steps if necessary
1267 pulldiscoverymapping = {}
1265 pulldiscoverymapping = {}
1268
1266
1269 def pulldiscovery(stepname):
1267 def pulldiscovery(stepname):
1270 """decorator for function performing discovery before pull
1268 """decorator for function performing discovery before pull
1271
1269
1272 The function is added to the step -> function mapping and appended to the
1270 The function is added to the step -> function mapping and appended to the
1273 list of steps. Beware that decorated function will be added in order (this
1271 list of steps. Beware that decorated function will be added in order (this
1274 may matter).
1272 may matter).
1275
1273
1276 You can only use this decorator for a new step, if you want to wrap a step
1274 You can only use this decorator for a new step, if you want to wrap a step
1277 from an extension, change the pulldiscovery dictionary directly."""
1275 from an extension, change the pulldiscovery dictionary directly."""
1278 def dec(func):
1276 def dec(func):
1279 assert stepname not in pulldiscoverymapping
1277 assert stepname not in pulldiscoverymapping
1280 pulldiscoverymapping[stepname] = func
1278 pulldiscoverymapping[stepname] = func
1281 pulldiscoveryorder.append(stepname)
1279 pulldiscoveryorder.append(stepname)
1282 return func
1280 return func
1283 return dec
1281 return dec
1284
1282
1285 def _pulldiscovery(pullop):
1283 def _pulldiscovery(pullop):
1286 """Run all discovery steps"""
1284 """Run all discovery steps"""
1287 for stepname in pulldiscoveryorder:
1285 for stepname in pulldiscoveryorder:
1288 step = pulldiscoverymapping[stepname]
1286 step = pulldiscoverymapping[stepname]
1289 step(pullop)
1287 step(pullop)
1290
1288
1291 @pulldiscovery('b1:bookmarks')
1289 @pulldiscovery('b1:bookmarks')
1292 def _pullbookmarkbundle1(pullop):
1290 def _pullbookmarkbundle1(pullop):
1293 """fetch bookmark data in bundle1 case
1291 """fetch bookmark data in bundle1 case
1294
1292
1295 If not using bundle2, we have to fetch bookmarks before changeset
1293 If not using bundle2, we have to fetch bookmarks before changeset
1296 discovery to reduce the chance and impact of race conditions."""
1294 discovery to reduce the chance and impact of race conditions."""
1297 if pullop.remotebookmarks is not None:
1295 if pullop.remotebookmarks is not None:
1298 return
1296 return
1299 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1297 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1300 # all known bundle2 servers now support listkeys, but lets be nice with
1298 # all known bundle2 servers now support listkeys, but lets be nice with
1301 # new implementation.
1299 # new implementation.
1302 return
1300 return
1303 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1301 pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')
1304
1302
1305
1303
1306 @pulldiscovery('changegroup')
1304 @pulldiscovery('changegroup')
1307 def _pulldiscoverychangegroup(pullop):
1305 def _pulldiscoverychangegroup(pullop):
1308 """discovery phase for the pull
1306 """discovery phase for the pull
1309
1307
1310 Current handle changeset discovery only, will change handle all discovery
1308 Current handle changeset discovery only, will change handle all discovery
1311 at some point."""
1309 at some point."""
1312 tmp = discovery.findcommonincoming(pullop.repo,
1310 tmp = discovery.findcommonincoming(pullop.repo,
1313 pullop.remote,
1311 pullop.remote,
1314 heads=pullop.heads,
1312 heads=pullop.heads,
1315 force=pullop.force)
1313 force=pullop.force)
1316 common, fetch, rheads = tmp
1314 common, fetch, rheads = tmp
1317 nm = pullop.repo.unfiltered().changelog.nodemap
1315 nm = pullop.repo.unfiltered().changelog.nodemap
1318 if fetch and rheads:
1316 if fetch and rheads:
1319 # If a remote heads in filtered locally, lets drop it from the unknown
1317 # If a remote heads in filtered locally, lets drop it from the unknown
1320 # remote heads and put in back in common.
1318 # remote heads and put in back in common.
1321 #
1319 #
1322 # This is a hackish solution to catch most of "common but locally
1320 # This is a hackish solution to catch most of "common but locally
1323 # hidden situation". We do not performs discovery on unfiltered
1321 # hidden situation". We do not performs discovery on unfiltered
1324 # repository because it end up doing a pathological amount of round
1322 # repository because it end up doing a pathological amount of round
1325 # trip for w huge amount of changeset we do not care about.
1323 # trip for w huge amount of changeset we do not care about.
1326 #
1324 #
1327 # If a set of such "common but filtered" changeset exist on the server
1325 # If a set of such "common but filtered" changeset exist on the server
1328 # but are not including a remote heads, we'll not be able to detect it,
1326 # but are not including a remote heads, we'll not be able to detect it,
1329 scommon = set(common)
1327 scommon = set(common)
1330 filteredrheads = []
1328 filteredrheads = []
1331 for n in rheads:
1329 for n in rheads:
1332 if n in nm:
1330 if n in nm:
1333 if n not in scommon:
1331 if n not in scommon:
1334 common.append(n)
1332 common.append(n)
1335 else:
1333 else:
1336 filteredrheads.append(n)
1334 filteredrheads.append(n)
1337 if not filteredrheads:
1335 if not filteredrheads:
1338 fetch = []
1336 fetch = []
1339 rheads = filteredrheads
1337 rheads = filteredrheads
1340 pullop.common = common
1338 pullop.common = common
1341 pullop.fetch = fetch
1339 pullop.fetch = fetch
1342 pullop.rheads = rheads
1340 pullop.rheads = rheads
1343
1341
1344 def _pullbundle2(pullop):
1342 def _pullbundle2(pullop):
1345 """pull data using bundle2
1343 """pull data using bundle2
1346
1344
1347 For now, the only supported data are changegroup."""
1345 For now, the only supported data are changegroup."""
1348 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1346 kwargs = {'bundlecaps': caps20to10(pullop.repo)}
1349
1347
1350 # At the moment we don't do stream clones over bundle2. If that is
1348 # At the moment we don't do stream clones over bundle2. If that is
1351 # implemented then here's where the check for that will go.
1349 # implemented then here's where the check for that will go.
1352 streaming = False
1350 streaming = False
1353
1351
1354 # pulling changegroup
1352 # pulling changegroup
1355 pullop.stepsdone.add('changegroup')
1353 pullop.stepsdone.add('changegroup')
1356
1354
1357 kwargs['common'] = pullop.common
1355 kwargs['common'] = pullop.common
1358 kwargs['heads'] = pullop.heads or pullop.rheads
1356 kwargs['heads'] = pullop.heads or pullop.rheads
1359 kwargs['cg'] = pullop.fetch
1357 kwargs['cg'] = pullop.fetch
1360 if 'listkeys' in pullop.remotebundle2caps:
1358 if 'listkeys' in pullop.remotebundle2caps:
1361 kwargs['listkeys'] = ['phases']
1359 kwargs['listkeys'] = ['phases']
1362 if pullop.remotebookmarks is None:
1360 if pullop.remotebookmarks is None:
1363 # make sure to always includes bookmark data when migrating
1361 # make sure to always includes bookmark data when migrating
1364 # `hg incoming --bundle` to using this function.
1362 # `hg incoming --bundle` to using this function.
1365 kwargs['listkeys'].append('bookmarks')
1363 kwargs['listkeys'].append('bookmarks')
1366
1364
1367 # If this is a full pull / clone and the server supports the clone bundles
1365 # If this is a full pull / clone and the server supports the clone bundles
1368 # feature, tell the server whether we attempted a clone bundle. The
1366 # feature, tell the server whether we attempted a clone bundle. The
1369 # presence of this flag indicates the client supports clone bundles. This
1367 # presence of this flag indicates the client supports clone bundles. This
1370 # will enable the server to treat clients that support clone bundles
1368 # will enable the server to treat clients that support clone bundles
1371 # differently from those that don't.
1369 # differently from those that don't.
1372 if (pullop.remote.capable('clonebundles')
1370 if (pullop.remote.capable('clonebundles')
1373 and pullop.heads is None and list(pullop.common) == [nullid]):
1371 and pullop.heads is None and list(pullop.common) == [nullid]):
1374 kwargs['cbattempted'] = pullop.clonebundleattempted
1372 kwargs['cbattempted'] = pullop.clonebundleattempted
1375
1373
1376 if streaming:
1374 if streaming:
1377 pullop.repo.ui.status(_('streaming all changes\n'))
1375 pullop.repo.ui.status(_('streaming all changes\n'))
1378 elif not pullop.fetch:
1376 elif not pullop.fetch:
1379 pullop.repo.ui.status(_("no changes found\n"))
1377 pullop.repo.ui.status(_("no changes found\n"))
1380 pullop.cgresult = 0
1378 pullop.cgresult = 0
1381 else:
1379 else:
1382 if pullop.heads is None and list(pullop.common) == [nullid]:
1380 if pullop.heads is None and list(pullop.common) == [nullid]:
1383 pullop.repo.ui.status(_("requesting all changes\n"))
1381 pullop.repo.ui.status(_("requesting all changes\n"))
1384 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1382 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1385 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1383 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1386 if obsolete.commonversion(remoteversions) is not None:
1384 if obsolete.commonversion(remoteversions) is not None:
1387 kwargs['obsmarkers'] = True
1385 kwargs['obsmarkers'] = True
1388 pullop.stepsdone.add('obsmarkers')
1386 pullop.stepsdone.add('obsmarkers')
1389 _pullbundle2extraprepare(pullop, kwargs)
1387 _pullbundle2extraprepare(pullop, kwargs)
1390 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1388 bundle = pullop.remote.getbundle('pull', **pycompat.strkwargs(kwargs))
1391 try:
1389 try:
1392 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1390 op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction)
1393 except bundle2.AbortFromPart as exc:
1391 except bundle2.AbortFromPart as exc:
1394 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1392 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1395 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1393 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1396 except error.BundleValueError as exc:
1394 except error.BundleValueError as exc:
1397 raise error.Abort(_('missing support for %s') % exc)
1395 raise error.Abort(_('missing support for %s') % exc)
1398
1396
1399 if pullop.fetch:
1397 if pullop.fetch:
1400 pullop.cgresult = bundle2.combinechangegroupresults(op)
1398 pullop.cgresult = bundle2.combinechangegroupresults(op)
1401
1399
1402 # processing phases change
1400 # processing phases change
1403 for namespace, value in op.records['listkeys']:
1401 for namespace, value in op.records['listkeys']:
1404 if namespace == 'phases':
1402 if namespace == 'phases':
1405 _pullapplyphases(pullop, value)
1403 _pullapplyphases(pullop, value)
1406
1404
1407 # processing bookmark update
1405 # processing bookmark update
1408 for namespace, value in op.records['listkeys']:
1406 for namespace, value in op.records['listkeys']:
1409 if namespace == 'bookmarks':
1407 if namespace == 'bookmarks':
1410 pullop.remotebookmarks = value
1408 pullop.remotebookmarks = value
1411
1409
1412 # bookmark data were either already there or pulled in the bundle
1410 # bookmark data were either already there or pulled in the bundle
1413 if pullop.remotebookmarks is not None:
1411 if pullop.remotebookmarks is not None:
1414 _pullbookmarks(pullop)
1412 _pullbookmarks(pullop)
1415
1413
1416 def _pullbundle2extraprepare(pullop, kwargs):
1414 def _pullbundle2extraprepare(pullop, kwargs):
1417 """hook function so that extensions can extend the getbundle call"""
1415 """hook function so that extensions can extend the getbundle call"""
1418 pass
1416 pass
1419
1417
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        # an earlier step (e.g. a bundle2 pull) already fetched the changegroup
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        # discovery found nothing to pull; report and record a null result
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        # no head restriction and nothing in common: this is a full pull
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    # pick the most capable retrieval command the remote supports,
    # from newest ('getbundle') down to legacy 'changegroup(subset)'
    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        cg = pullop.remote.changegroup(pullop.fetch, 'pull')
    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull')
    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1454
1452
1455 def _pullphase(pullop):
1453 def _pullphase(pullop):
1456 # Get remote phases data from remote
1454 # Get remote phases data from remote
1457 if 'phases' in pullop.stepsdone:
1455 if 'phases' in pullop.stepsdone:
1458 return
1456 return
1459 remotephases = pullop.remote.listkeys('phases')
1457 remotephases = pullop.remote.listkeys('phases')
1460 _pullapplyphases(pullop, remotephases)
1458 _pullapplyphases(pullop, remotephases)
1461
1459
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if 'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add('phases')
    publishing = bool(remotephases.get('publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(pullop.repo,
                                                 pullop.pulledsubset,
                                                 remotephases)
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    # work on the unfiltered repo so hidden changesets are considered too
    unfi = pullop.repo.unfiltered()
    # bind hot lookups to locals for the list comprehensions below
    phase = unfi._phasecache.phase
    rev = unfi.changelog.nodemap.get
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        # only open a transaction when there is an actual boundary to move
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
1496
1494
1497 def _pullbookmarks(pullop):
1495 def _pullbookmarks(pullop):
1498 """process the remote bookmark information to update the local one"""
1496 """process the remote bookmark information to update the local one"""
1499 if 'bookmarks' in pullop.stepsdone:
1497 if 'bookmarks' in pullop.stepsdone:
1500 return
1498 return
1501 pullop.stepsdone.add('bookmarks')
1499 pullop.stepsdone.add('bookmarks')
1502 repo = pullop.repo
1500 repo = pullop.repo
1503 remotebookmarks = pullop.remotebookmarks
1501 remotebookmarks = pullop.remotebookmarks
1504 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1502 remotebookmarks = bookmod.unhexlifybookmarks(remotebookmarks)
1505 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1503 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1506 pullop.remote.url(),
1504 pullop.remote.url(),
1507 pullop.gettransaction,
1505 pullop.gettransaction,
1508 explicit=pullop.explicitbookmarks)
1506 explicit=pullop.explicitbookmarks)
1509
1507
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        # markers travel as base85-encoded blobs under 'dumpN' pushkeys;
        # 'dump0' being present means there is something to fetch
        if 'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
                # obsstore changed: cached computed sets are stale
                pullop.repo.invalidatevolatilesets()
    return tr
1537
1535
def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    # advertise bundle2 support plus this repo's url-quoted capability blob
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1544
1542
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}

def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is recorded in ``getbundle2partsmapping`` under
    ``stepname``, and the step name is appended to ``getbundle2partsorder``
    (or inserted at position ``idx`` when given). Decoration order therefore
    matters.

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the getbundle2partsmapping dictionary directly."""
    def register(func):
        # each step name may be registered only once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is not None:
            getbundle2partsorder.insert(idx, stepname)
        else:
            getbundle2partsorder.append(stepname)
        return func
    return register
1571
1569
def bundle2requested(bundlecaps):
    """Tell whether the client capabilities request a bundle2 stream."""
    if bundlecaps is None:
        return False
    # any capability token of the 'HG2x' family selects bundle2
    for cap in bundlecaps:
        if cap.startswith('HG2'):
            return True
    return False
1576
1574
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        # bundle10 carries nothing but the changegroup, so any leftover
        # argument is unsupported
        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        bundler = changegroup.getbundler('01', repo, bundlecaps)
        return changegroup.getsubsetraw(repo, outgoing, bundler, source)

    # bundle20 case
    b2caps = {}
    # decode the client's bundle2 capabilities from the 'bundle2=' blob
    for bcaps in bundlecaps:
        if bcaps.startswith('bundle2='):
            blob = urlreq.unquote(bcaps[len('bundle2='):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # delegate part generation to the registered step functions, in order
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
             **pycompat.strkwargs(kwargs))

    return bundler.getchunks()
1617
1615
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    cg = None
    if kwargs.get('cg', True):
        # build changegroup bundle here.
        version = '01'
        cgversions = b2caps.get('changegroup')
        if cgversions:  # 3.1 and 3.2 ship with an empty value
            # negotiate: keep only versions both sides understand, pick newest
            cgversions = [v for v in cgversions
                          if v in changegroup.supportedoutgoingversions(repo)]
            if not cgversions:
                raise ValueError(_('no common changegroup version'))
            version = max(cgversions)
        outgoing = _computeoutgoing(repo, heads, common)
        cg = changegroup.getlocalchangegroupraw(repo, source, outgoing,
                                                bundlecaps=bundlecaps,
                                                version=version)

    if cg:
        part = bundler.newpart('changegroup', data=cg)
        if cgversions:
            part.addparam('version', version)
        # advisory parameter carrying the number of outgoing changesets
        part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False)
        if 'treemanifest' in repo.requirements:
            part.addparam('treemanifest', '1')
1645
1643
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    # one 'listkeys' part per namespace the client asked for
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1656
1654
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        # client did not ask for markers
        return
    if heads is None:
        heads = repo.heads()
    # gather every marker relevant to the ancestors of the requested heads
    subset = [ctx.node() for ctx in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1668
1666
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Skip unless changesets are being exchanged and the client advertised
    # support for the part.
    if not kwargs.get('cg', True) or 'hgtagsfnodes' not in b2caps:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
1688
1686
def _getbookmarks(repo, **kwargs):
    """Returns bookmark to node mapping.

    This function is primarily used to generate `bookmarks` bundle2 part.
    It is a separate function in order to make it easy to wrap it
    in extensions. Passing `kwargs` to the function makes it easy to
    add new parameters in extensions.
    """
    marks = {}
    for bookmark, node in bookmod.listbinbookmarks(repo):
        marks[bookmark] = node
    return marks
1699
1697
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
    # the client may send the literal 'force', the exact current head list,
    # or a sha1 of the head list as it stood when the bundle was created
    if (their_heads == ['force'] or their_heads == heads or
        their_heads == ['hashed', heads_hash]):
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
1713
1711
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith('remote:http:') or url.startswith('remote:https:'):
        # always capture for http(s) peers so output can be relayed back
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call wil be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                def gettransaction():
                    # lazily take wlock, lock and open the transaction the
                    # first time any bundle2 part needs one
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput)
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        # start capturing ui output to append it to the reply
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            r.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # mark the exception as raised during bundle2 processing
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    # salvage already-built reply parts so the client still
                    # receives them along with the error
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        # release transaction then locks, i.e. reverse acquisition order
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
1786
1784
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""

    repo = pullop.repo
    remote = pullop.remote

    # Clone bundles must be enabled, the local repo must be empty, no
    # specific heads may have been requested, and the server must advertise
    # the capability; otherwise fall through to a regular pull.
    if not repo.ui.configbool('ui', 'clonebundles'):
        return
    if len(repo) or pullop.heads or not remote.capable('clonebundles'):
        return

    manifest = remote._call('clonebundles')

    # Issuing the wire protocol command is good enough to record that an
    # attempt was made.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, manifest)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(repo, entries)
    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. And from the client's perspective,
        # refusing an automatic fallback would mean not being able to
        # clone at all.
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        # The bundle failed to apply. Falling back is opt-in so that a
        # server expecting its expensive clone load to be offloaded is not
        # flooded by a thundering herd of regular clones.
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
1850
1848
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue

        # First whitespace-separated field is the URL; the rest are
        # percent-encoded KEY=VALUE attribute pairs.
        attrs = {'URL': fields[0]}
        for token in fields[1:]:
            key, value = token.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    comp, version, params = parsebundlespec(
                        repo, value, externalnames=True)
                    attrs['COMPRESSION'] = comp
                    attrs['VERSION'] = version
                except (error.InvalidBundleSpecification,
                        error.UnsupportedBundleSpecification):
                    # Unknown/invalid specs are preserved verbatim in attrs
                    # but not decomposed.
                    pass

        entries.append(attrs)

    return entries
1886
1884
def filterclonebundleentries(repo, entries):
    """Remove incompatible clone bundle manifest entries.

    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
    and returns a new list consisting of only the entries that this client
    should be able to apply.

    There is no guarantee we'll be able to apply all returned entries because
    the metadata we use to filter on may be missing or wrong.
    """
    usable = []
    for entry in entries:
        # An entry without a BUNDLESPEC cannot be vetted; let it through.
        spec = entry.get('BUNDLESPEC')
        if spec:
            try:
                parsebundlespec(repo, spec, strict=True)
            except error.InvalidBundleSpecification as e:
                repo.ui.debug(str(e) + '\n')
                continue
            except error.UnsupportedBundleSpecification as e:
                repo.ui.debug('filtering %s because unsupported bundle '
                              'spec: %s\n' % (entry['URL'], str(e)))
                continue

        # Entries that require TLS SNI are useless without SNI support.
        if 'REQUIRESNI' in entry and not sslutil.hassni:
            repo.ui.debug('filtering %s because SNI not supported\n' %
                          entry['URL'])
            continue

        usable.append(entry)

    return usable
1919
1917
class clonebundleentry(object):
    """Represents an item in a clone bundles manifest.

    This rich class is needed to support sorting since sorted() in Python 3
    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
    won't work.
    """

    def __init__(self, value, prefers):
        # value: dict of manifest attributes for one entry (URL, COMPRESSION,
        # VERSION, ...); prefers: ordered (key, value) preference pairs from
        # ``ui.clonebundleprefers``.
        self.value = value
        self.prefers = prefers

    def _cmp(self, other):
        """Three-way compare against ``other`` using ``self.prefers``.

        Returns -1 when ``self`` should sort first, 1 when ``other``
        should, and 0 when preferences cannot distinguish them.
        """
        for prefkey, prefvalue in self.prefers:
            avalue = self.value.get(prefkey)
            bvalue = other.value.get(prefkey)

            # Special case for b missing attribute and a matches exactly.
            if avalue is not None and bvalue is None and avalue == prefvalue:
                return -1

            # Special case for a missing attribute and b matches exactly.
            if bvalue is not None and avalue is None and bvalue == prefvalue:
                return 1

            # We can't compare unless attribute present on both.
            if avalue is None or bvalue is None:
                continue

            # Same values should fall back to next attribute.
            if avalue == bvalue:
                continue

            # Exact matches come first.
            if avalue == prefvalue:
                return -1
            if bvalue == prefvalue:
                return 1

            # Fall back to next attribute.
            continue

        # If we got here we couldn't sort by attributes and prefers. Fall
        # back to index order.
        return 0

    def __lt__(self, other):
        return self._cmp(other) < 0

    def __gt__(self, other):
        return self._cmp(other) > 0

    def __eq__(self, other):
        return self._cmp(other) == 0

    def __le__(self, other):
        return self._cmp(other) <= 0

    def __ge__(self, other):
        return self._cmp(other) >= 0

    def __ne__(self, other):
        return self._cmp(other) != 0

    # Defining __eq__ implicitly sets __hash__ to None on Python 3, making
    # instances unhashable. Restore the default identity hash (the Python 2
    # behavior) so instances can still live in sets/dict keys; identity is
    # the safe choice given equality depends on mutable dict contents.
    __hash__ = object.__hash__
1983
1981
def sortclonebundleentries(ui, entries):
    """Return ``entries`` ordered by the user's ``ui.clonebundleprefers``."""
    raw = ui.configlist('ui', 'clonebundleprefers')
    if not raw:
        # No preferences configured: preserve the manifest's order.
        return list(entries)

    # Each preference is a "KEY=VALUE" string; split once on '='.
    prefs = [p.split('=', 1) for p in raw]

    # Wrap in rich-comparison objects so a stable sort can use the
    # multi-attribute preference logic.
    wrapped = [clonebundleentry(e, prefs) for e in entries]
    wrapped.sort()
    return [w.value for w in wrapped]
1993
1991
def trypullbundlefromurl(ui, repo, url):
    """Attempt to apply a bundle from a URL.

    Returns True when the bundle was fetched and applied, False when the
    fetch failed with an HTTP/URL error (a warning is printed in that case).
    """
    with repo.lock(), repo.transaction('bundleurl') as tr:
        try:
            # NOTE(review): the file handle is left to the GC rather than
            # closed explicitly — confirm whether readbundle consumers own it.
            fh = urlmod.open(ui, url)
            cg = readbundle(ui, fh, 'stream')

            # Stream clones apply themselves; everything else goes through
            # the bundle2 machinery inside the open transaction.
            if isinstance(cg, streamclone.streamcloneapplier):
                cg.apply(repo)
            else:
                bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
            return True
        except urlerr.httperror as e:
            ui.warn(_('HTTP error fetching bundle: %s\n') % str(e))
        except urlerr.urlerror as e:
            ui.warn(_('error fetching bundle: %s\n') % e.reason)

        return False
General Comments 0
You need to be logged in to leave comments. Login now