##// END OF EJS Templates
remotenames: synchronise remotenames after push also...
Pulkit Goyal -
r38634:4d5fb406 default
parent child Browse files
Show More
@@ -1,2418 +1,2421 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 bin,
15 bin,
16 hex,
16 hex,
17 nullid,
17 nullid,
18 )
18 )
19 from .thirdparty import (
19 from .thirdparty import (
20 attr,
20 attr,
21 )
21 )
22 from . import (
22 from . import (
23 bookmarks as bookmod,
23 bookmarks as bookmod,
24 bundle2,
24 bundle2,
25 changegroup,
25 changegroup,
26 discovery,
26 discovery,
27 error,
27 error,
28 lock as lockmod,
28 lock as lockmod,
29 logexchange,
29 logexchange,
30 obsolete,
30 obsolete,
31 phases,
31 phases,
32 pushkey,
32 pushkey,
33 pycompat,
33 pycompat,
34 scmutil,
34 scmutil,
35 sslutil,
35 sslutil,
36 streamclone,
36 streamclone,
37 url as urlmod,
37 url as urlmod,
38 util,
38 util,
39 )
39 )
40 from .utils import (
40 from .utils import (
41 stringutil,
41 stringutil,
42 )
42 )
43
43
44 urlerr = util.urlerr
44 urlerr = util.urlerr
45 urlreq = util.urlreq
45 urlreq = util.urlreq
46
46
47 # Maps bundle version human names to changegroup versions.
47 # Maps bundle version human names to changegroup versions.
48 _bundlespeccgversions = {'v1': '01',
48 _bundlespeccgversions = {'v1': '01',
49 'v2': '02',
49 'v2': '02',
50 'packed1': 's1',
50 'packed1': 's1',
51 'bundle2': '02', #legacy
51 'bundle2': '02', #legacy
52 }
52 }
53
53
54 # Maps bundle version with content opts to choose which part to bundle
54 # Maps bundle version with content opts to choose which part to bundle
55 _bundlespeccontentopts = {
55 _bundlespeccontentopts = {
56 'v1': {
56 'v1': {
57 'changegroup': True,
57 'changegroup': True,
58 'cg.version': '01',
58 'cg.version': '01',
59 'obsolescence': False,
59 'obsolescence': False,
60 'phases': False,
60 'phases': False,
61 'tagsfnodescache': False,
61 'tagsfnodescache': False,
62 'revbranchcache': False
62 'revbranchcache': False
63 },
63 },
64 'v2': {
64 'v2': {
65 'changegroup': True,
65 'changegroup': True,
66 'cg.version': '02',
66 'cg.version': '02',
67 'obsolescence': False,
67 'obsolescence': False,
68 'phases': False,
68 'phases': False,
69 'tagsfnodescache': True,
69 'tagsfnodescache': True,
70 'revbranchcache': True
70 'revbranchcache': True
71 },
71 },
72 'packed1' : {
72 'packed1' : {
73 'cg.version': 's1'
73 'cg.version': 's1'
74 }
74 }
75 }
75 }
76 _bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
76 _bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
77
77
78 _bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
78 _bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
79 "tagsfnodescache": False,
79 "tagsfnodescache": False,
80 "revbranchcache": False}}
80 "revbranchcache": False}}
81
81
82 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
82 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
83 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
83 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
84
84
@attr.s
class bundlespec(object):
    """Parsed form of a bundle specification string.

    Carries both the human-facing and wire-protocol identifiers for the
    compression engine and bundle version, the extra ``key=value``
    parameters, and the content options selecting which parts to bundle.
    """
    compression = attr.ib()      # human name of the compression engine
    wirecompression = attr.ib()  # wire-protocol name of the engine
    version = attr.ib()          # human name of the bundle version
    wireversion = attr.ib()      # changegroup version used on the wire
    params = attr.ib()           # extra parameters from the spec string
    contentopts = attr.ib()      # content options (which parts to bundle)
93
93
def parsebundlespec(repo, spec, strict=True):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial are
    readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats
    and <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited key=value
    pairs.

    If ``strict`` is True (the default) <compression> is required. Otherwise,
    it is optional.

    Returns a bundlespec object of (compression, version, parameters).
    Compression will be ``None`` if not in strict mode and a compression isn't
    defined.

    An ``InvalidBundleSpecification`` is raised when the specification is
    not syntactically well formed.

    An ``UnsupportedBundleSpecification`` is raised when the compression or
    bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(s):
        # Split "<version>;k0=v0;k1=v1" into the bare version string and a
        # dict of URI-decoded parameters.
        if ';' not in s:
            return s, {}

        version, paramstr = s.split(';', 1)
        params = {}
        for p in paramstr.split(';'):
            if '=' not in p:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % p)

            key, value = p.split('=', 1)
            params[urlreq.unquote(key)] = urlreq.unquote(value)

        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Fully-specified form: "<compression>-<version>[;params]".
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            compression = 'none' if spec == 'packed1' else 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    # Compute contentopts based on the version, then overlay any variant
    # selected through the parameters (currently only stream=v2).
    contentopts = _bundlespeccontentopts.get(version, {}).copy()
    if params.get("stream") == "v2":
        contentopts.update(_bundlespecvariants["streamv2"])

    engine = util.compengines.forbundlename(compression)
    compression, wirecompression = engine.bundletype()
    wireversion = _bundlespeccgversions[version]

    return bundlespec(compression, wirecompression, version, wireversion,
                      params, contentopts)
222
222
def readbundle(ui, fh, fname, vfs=None):
    """Sniff the 4-byte magic of a bundle stream and return an unbundler.

    Returns a ``cg1unpacker`` for HG10 bundles, a bundle2 unbundler for
    HG2x bundles, and a ``streamcloneapplier`` for HGS1 stream bundles.
    Aborts on anything unrecognized. ``fname`` is only used for error
    messages (resolved through ``vfs`` when given).
    """
    header = changegroup.readexactly(fh, 4)

    alg = None
    if not fname:
        fname = "stream"
        if not header.startswith('HG') and header.startswith('\0'):
            # Headerless changegroup: push the sniffed bytes back onto the
            # stream and treat it as an uncompressed HG10 bundle.
            fh = changegroup.headerlessfixup(fh, header)
            header = "HG10"
            alg = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic = header[0:2]
    version = header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if alg is None:
            alg = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, alg)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
250
250
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.
    """
    def speccompression(alg):
        # Map a bundle compression type to its human bundlespec name, or
        # None when the engine is unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            comp = speccompression(b.params['Compression'])
            if not comp:
                raise error.Abort(_('unknown compression algorithm: %s') % comp)
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))
            elif part.type == 'stream2' and version is None:
                # A stream2 part requires to be part of a v2 bundle
                version = "v2"
                requirements = urlreq.unquote(part.params['requirements'])
                splitted = requirements.split()
                params = bundle2._formatrequirementsparams(splitted)
                return 'none-v2;stream=v2;%s' % params

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        formatted = bundle2._formatrequirementsparams(requirements)
        return 'none-packed1;%s' % formatted
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
310
310
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    changelog = repo.changelog
    if not common:
        # No common nodes known: everything since the null revision.
        common = [nullid]
    else:
        # Drop common nodes the local changelog does not actually know.
        known = changelog.hasnode
        common = [node for node in common if known(node)]
    if not heads:
        heads = changelog.heads()
    return discovery.outgoing(repo, common, heads)
329
329
330 def _forcebundle1(op):
330 def _forcebundle1(op):
331 """return true if a pull/push must use bundle1
331 """return true if a pull/push must use bundle1
332
332
333 This function is used to allow testing of the older bundle version"""
333 This function is used to allow testing of the older bundle version"""
334 ui = op.repo.ui
334 ui = op.repo.ui
335 # The goal is this config is to allow developer to choose the bundle
335 # The goal is this config is to allow developer to choose the bundle
336 # version used during exchanged. This is especially handy during test.
336 # version used during exchanged. This is especially handy during test.
337 # Value is a list of bundle version to be picked from, highest version
337 # Value is a list of bundle version to be picked from, highest version
338 # should be used.
338 # should be used.
339 #
339 #
340 # developer config: devel.legacy.exchange
340 # developer config: devel.legacy.exchange
341 exchange = ui.configlist('devel', 'legacy.exchange')
341 exchange = ui.configlist('devel', 'legacy.exchange')
342 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
342 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
343 return forcebundle1 or not op.remote.capable('bundle2')
343 return forcebundle1 or not op.remote.capable('bundle2')
344
344
class pushoperation(object):
    """A object that represent a single push operation

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
                 bookmarks=(), pushvars=None):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmark explicitly pushed
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # step already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discover.outgoing object (contains common and outgoing data)
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.missingheads

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # not target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::missingheads and ::commonheads)
        # (missingheads is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (missingheads and ::commonheads)
        #              + (commonheads and ::missingheads))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::missingheads) - commonheads)
        #
        # We can pick:
        # * missingheads part of common (::commonheads)
        common = self.outgoing.common
        nodemap = self.repo.changelog.nodemap
        cheads = [node for node in self.revs if nodemap[node] in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set('%ln and parents(roots(%ln))',
                          self.outgoing.commonheads,
                          self.outgoing.missing)
        cheads.extend(ctx.node() for ctx in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        # A successful changegroup push makes the pushed heads common;
        # otherwise only the precomputed fallback heads are.
        return self.futureheads if self.cgresult else self.fallbackheads

    # mapping of message used when pushing bookmark
    bookmsgmap = {'update': (_("updating bookmark %s\n"),
                             _('updating bookmark %s failed!\n')),
                  'export': (_("exporting bookmark %s\n"),
                             _('exporting bookmark %s failed!\n')),
                  'delete': (_("deleting remote bookmark %s\n"),
                             _('deleting remote bookmark %s failed!\n')),
                  }
471
471
472
472
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           **pycompat.strkwargs(opargs))
    if pushop.remote.local():
        # local destination: it must understand every requirement of the
        # source repository or data would be lost/corrupted on its side
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks or other
        # things requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if (not _forcebundle1(pushop)) and maypushback:
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except error.LockUnavailable as err:
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = 'cannot lock source repository: %s\n' % err
        pushop.ui.debug(msg)

    with wlock or util.nullcontextmanager(), \
            lock or util.nullcontextmanager(), \
            pushop.trmanager or util.nullcontextmanager():
        # the individual push steps below are all no-ops when a previous
        # step (e.g. the bundle2 push) already performed their work
        pushop.repo.checkpush(pushop)
        _pushdiscovery(pushop)
        if not _forcebundle1(pushop):
            _pushbundle2(pushop)
        _pushchangeset(pushop)
        _pushsyncphase(pushop)
        _pushobsolete(pushop)
        _pushbookmark(pushop)

    # synchronise our copy of the remote names after the push too, so local
    # remote-tracking data reflects what we just sent
    if repo.ui.configbool('experimental', 'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pushop
535
538
# list of steps to perform discovery before push, in execution order
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
543
546
def pushdiscovery(stepname):
    """Return a decorator registering a push-discovery step.

    The decorated function is recorded in the step -> function mapping and
    its name is appended to the ordered step list. Registration order is
    execution order, so it may matter.

    Only use this decorator for brand new steps; to wrap a step defined by
    an extension, mutate the pushdiscovery dictionary directly."""
    def register(func):
        # a step name may only ever be registered once
        assert stepname not in pushdiscoverymapping
        pushdiscoverymapping[stepname] = func
        pushdiscoveryorder.append(stepname)
        return func
    return register
559
562
def _pushdiscovery(pushop):
    """Execute every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
565
568
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """Figure out which changesets need to be pushed."""
    repo, remote = pushop.repo, pushop.remote
    # limit the incoming discovery to the requested revisions, when given
    kwargs = {'force': pushop.force}
    if pushop.revs:
        kwargs['ancestorsof'] = pushop.revs
    commoninc = discovery.findcommonincoming(repo, remote, **kwargs)
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(
        repo, remote, onlyheads=pushop.revs,
        commoninc=commoninc, force=pushop.force)
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
582
585
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)

    Fills in ``pushop.outdatedphases`` (phases to update if the changeset
    push succeeds) and ``pushop.fallbackoutdatedphases`` (phases to update
    if it does not).
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = listkeys(pushop.remote, 'phases')

    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(pushop.repo,
                                                     pushop.fallbackheads,
                                                     remotephases)
    droots = pushop.remotephases.draftroots

    extracond = ''
    if not pushop.remotephases.publishing:
        # non-publishing server: only already-public changesets move phase
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                       outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
635
638
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """Collect obsolescence markers relevant to the future heads."""
    repo = pushop.repo
    # skip unless marker exchange is enabled, we actually have markers,
    # and the remote advertises the obsolete namespace (checked last as
    # it requires a wire-protocol round trip)
    if (not obsolete.isenabled(repo, obsolete.exchangeopt)
        or not repo.obsstore
        or 'obsolete' not in listkeys(pushop.remote, 'namespaces')):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (ctx.node() for ctx in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
652
655
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """discover which bookmark updates to include in the push

    Compares local and remote bookmarks and delegates the actual decision
    to ``_processcompared``.
    """
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    # revisions being pushed (as a revnum ancestor set), used to restrict
    # which advanced bookmarks may move; empty tuple means "no restriction"
    ancestors = ()
    if pushop.revs:
        revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)

    remotebookmark = listkeys(remote, 'bookmarks')

    # bookmarks explicitly requested on the command line, with any local
    # shortcut names expanded
    explicit = set([repo._bookmarks.expandname(bookmark)
                    for bookmark in pushop.bookmarks])

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # hex() a node, passing through the None used for "no node"
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        return [(b, safehex(scid), safehex(dcid))
                for (b, scid, dcid) in bookmarks]

    comp = [hexifycompbookmarks(marks) for marks in comp]
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
683
686
def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """decide which bookmark updates to send from the comparison result

    Exists as a separate function so extensions can alter this behavior.
    """
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo

    def _consume(name):
        # mark an explicitly-requested bookmark as handled
        if name in explicit:
            explicit.remove(name)
            return True
        return False

    # fast-forwarded on our side: always eligible, but only moved when the
    # new target is among the revisions actually being pushed
    for name, scid, dcid in advsrc:
        _consume(name)
        if not pushed or repo[scid].rev() in pushed:
            pushop.outbookmarks.append((name, dcid, scid))
    # new on our side: created remotely only when explicitly requested
    for name, scid, dcid in addsrc:
        if _consume(name):
            pushop.outbookmarks.append((name, '', scid))
    # advanced remotely, diverged, or simply different: overwrite on request
    for name, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if _consume(name):
            pushop.outbookmarks.append((name, dcid, scid))
    # present only remotely
    for name, scid, dcid in adddst:
        if _consume(name):
            # treat as "deleted locally"
            pushop.outbookmarks.append((name, dcid, ''))
    # identical bookmarks shouldn't get reported
    for name, scid, dcid in same:
        _consume(name)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(_('bookmark %s does not exist on the local '
                         'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
727
730
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before pushing

    Returns False when there is nothing to push. Aborts when the push
    would publish obsolete or unstable changesets (unless forced).
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
762
765
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
770
773
def b2partsgenerator(stepname, idx=None):
    """Return a decorator registering a bundle2 part-generating step.

    The decorated function is recorded in the step -> function mapping and
    its name is inserted in the ordered step list — appended by default, or
    at position ``idx`` when given. Registration order matters.

    Only use this decorator for brand new steps; to wrap a step defined by
    an extension, modify the b2partsgenmapping dictionary directly."""
    def register(func):
        # a step name may only ever be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func
    return register
789
792
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # server cannot restrict the check to affected heads (or we
            # know nothing about its branchmap): check all remote heads
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # only check the remote heads our push actually affects
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
813
816
def _pushing(pushop):
    """Tell whether this push operation will actually send anything."""
    payload = (pushop.outgoing.missing,
               pushop.outdatedphases,
               pushop.outobsmarkers,
               pushop.outbookmarks)
    return any(payload)
820
823
@b2partsgenerator('check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert a part checking that remote bookmarks did not move"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if not pushop.outbookmarks or 'bookmarks' not in b2caps:
        return
    # record the expected current (old) position of each pushed bookmark
    data = [(book, bin(old)) for book, old, new in pushop.outbookmarks]
    bundler.newpart('check:bookmarks', data=bookmod.binaryencode(data))
836
839
@b2partsgenerator('check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = 'heads' in b2caps.get('phases', ())
    if pushop.remotephases is not None and hasphaseheads:
        # check that the remote phase has not changed
        checks = [[] for p in phases.allphases]
        checks[phases.public].extend(pushop.remotephases.publicheads)
        checks[phases.draft].extend(pushop.remotephases.draftroots)
        if any(checks):
            # sort for a deterministic part payload
            for nodes in checks:
                nodes.sort()
            checkdata = phases.binaryencode(checks)
            bundler.newpart('check:phases', data=checkdata)
854
857
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    b2caps = bundle2.bundle2caps(pushop.remote)
    # default to the oldest changegroup format unless the remote advertises
    # something newer that we also support
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
894
897
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """dispatch phase synchronisation to a supported bundle2 part"""
    if 'phases' in pushop.stepsdone:
        return
    caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    # administrators may force the legacy pushkey-based exchange
    forcelegacy = 'phases' in ui.configlist('devel', 'legacy.exchange')

    if 'heads' in caps.get('phases', ()) and not forcelegacy:
        # modern binary phase-heads part
        return _pushb2phaseheads(pushop, bundler)
    if 'pushkey' in caps:
        # legacy pushkey-based exchange
        return _pushb2phasespushkey(pushop, bundler)
911
914
def _pushb2phaseheads(pushop, bundler):
    """emit outdated phases as a binary 'phase-heads' bundle2 part"""
    pushop.stepsdone.add('phases')
    if not pushop.outdatedphases:
        return
    updates = [[] for _p in phases.allphases]
    # every outdated head is turned public (phase index 0)
    updates[0].extend(head.node() for head in pushop.outdatedphases)
    bundler.newpart('phase-heads', data=phases.binaryencode(updates))
920
923
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part"""
    pushop.stepsdone.add('phases')
    # (part id, node) pairs, used to map server replies back to heads
    part2node = []

    def handlefailure(pushop, exc):
        # abort with the node whose pushkey part failed on the server
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    # one pushkey part per head to turn public
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # inspect the server reply for each part and warn on failure
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
955
958
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsmarkers part to the bundle when possible"""
    # skip when another step already handled markers
    if 'obsmarkers' in pushop.stepsdone:
        return
    # skip when the remote speaks no obsmarker format we know
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        return
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
967
970
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if 'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)

    # devel knob to force the legacy pushkey-based exchange
    legacybooks = 'bookmarks' in pushop.repo.ui.configlist('devel',
                                                           'legacy.exchange')

    if 'bookmarks' in b2caps and not legacybooks:
        return _pushb2bookmarkspart(pushop, bundler)
    if 'pushkey' in b2caps:
        return _pushb2bookmarkspushkey(pushop, bundler)
982
985
983 def _bmaction(old, new):
986 def _bmaction(old, new):
984 """small utility for bookmark pushing"""
987 """small utility for bookmark pushing"""
985 if not old:
988 if not old:
986 return 'export'
989 return 'export'
987 elif not new:
990 elif not new:
988 return 'delete'
991 return 'delete'
989 return 'update'
992 return 'update'
990
993
def _pushb2bookmarkspart(pushop, bundler):
    """push bookmarks through a single binary 'bookmarks' bundle2 part"""
    pushop.stepsdone.add('bookmarks')
    if not pushop.outbookmarks:
        return

    allactions = []
    payload = []
    for book, old, new in pushop.outbookmarks:
        new = bin(new)
        payload.append((book, new))
        allactions.append((book, _bmaction(old, new)))
    bundler.newpart('bookmarks', data=bookmod.binaryencode(payload))

    def handlereply(op):
        ui = pushop.ui
        # the part is all-or-nothing: reaching here means every
        # bookmark update succeeded
        for book, action in allactions:
            ui.status(bookmsgmap[action][0] % book)

    return handlereply
1012
1015
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmark updates using one pushkey bundle2 part per bookmark

    Registers a failure callback for each generated part and returns a
    reply handler that reports per-bookmark success or failure.
    """
    pushop.stepsdone.add('bookmarks')
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                # consistency fix: use the local 'ui' binding (the
                # original mixed pushop.ui.warn with ui.warn/ui.status)
                ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
                    if pushop.bkresult is not None:
                        pushop.bkresult = 1
    return handlereply
1057
1060
@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    pushvars = pushop.pushvars
    if not pushvars:
        return
    # parse KEY=VALUE pairs up front so we abort before emitting the part
    shellvars = {}
    for raw in pushvars:
        if '=' not in raw:
            msg = ("unable to parse variable '%s', should follow "
                   "'KEY=VALUE' or 'KEY=' format")
            raise error.Abort(msg % raw)
        k, v = raw.split('=', 1)
        shellvars[k] = v

    part = bundler.newpart('pushvars')
    for key, value in shellvars.iteritems():
        part.addparam(key, value, mandatory=False)
1076
1079
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback (server-initiated data in the reply) needs a transaction
    # to land in and an explicit opt-in
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # advertise our reply capabilities to the server
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback,
                                                      role='client'))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # let every registered part generator contribute; generators may
    # return a callable to be invoked on the server reply
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # only 'replycaps' present means there is nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            with pushop.remote.commandexecutor() as e:
                reply = e.callcommand('unbundle', {
                    'bundle': stream,
                    'heads': ['force'],
                    'url': pushop.remote.url(),
                }).result()
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        # delegate the pushkey failure to the callback registered by the
        # part generator that produced the failing part
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
1130
1133
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo"""
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    fullpush = (pushop.revs is None
                and not outgoing.excluded
                and not pushop.repo.changelog.filteredrevs)
    if fullpush:
        # push everything: use the fast path, no race possible on push
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    remoteheads = ['force'] if pushop.force else pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
1170
1173
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely"""
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = listkeys(pushop.remote, 'phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        pheads, droots = phases.analyzeremotephases(pushop.repo, cheads,
                                                    remotephases)
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            with pushop.remote.commandexecutor() as e:
                r = e.callcommand('pushkey', {
                    'namespace': 'phases',
                    'key': newremotehead.hex(),
                    'old': '%d' % phases.draft,
                    'new': '%d' % phases.public
                }).result()

            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1230
1233
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # repo is not locked, do not change any phases!
    # Informs the user that phases should have been moved when
    # applicable.
    wouldmove = [n for n in nodes if phase < pushop.repo[n].phase()]
    if wouldmove:
        phasestr = phases.phasenames[phase]
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1247
1250
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    results = []
    # reverse sort to ensure we end with dump0
    for key in sorted(remotedata, reverse=True):
        results.append(remote.pushkey('obsolete', key, '', remotedata[key]))
    if not all(results):
        msg = _('failed to push some obsolete markers!\n')
        repo.ui.warn(msg)
1266
1269
def _pushbookmark(pushop):
    """Update bookmark position on remote"""
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        action = _bmaction(old, new)

        with remote.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': 'bookmarks',
                'key': b,
                'old': old,
                'new': new,
            }).result()

        if r:
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value form invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
1297
1300
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
                                  for bookmark in bookmarks]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changeset target by the pull"""
        if self.heads is not None:
            # We pulled a specific subset: sync on this subset
            return self.heads
        # We pulled everything possible: sync on everything common
        seen = set(self.common)
        subset = list(self.common)
        subset.extend(n for n in self.rheads if n not in seen)
        return subset

    @util.propertycache
    def canusebundle2(self):
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1368
1371
class transactionmanager(util.transactional):
    """An object managing the life cycle of a transaction.

    The transaction is created on demand and the appropriate hooks run
    when it is closed."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        if self._tr is not None:
            self._tr.close()

    def release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()
1398
1401
def listkeys(remote, namespace):
    """fetch the pushkey listing for *namespace* from *remote*"""
    args = {'namespace': namespace}
    with remote.commandexecutor() as executor:
        return executor.callcommand('listkeys', args).result()
1402
1405
def _fullpullbundle2(repo, pullop):
    # The server may send a partial reply, i.e. when inlining
    # pre-computed bundles. In that case, update the common
    # set based on the results and pull another bundle.
    #
    # There are two indicators that the process is finished:
    # - no changeset has been added, or
    # - all remote heads are known locally.
    # The head check must use the unfiltered view as obsoletion
    # markers can hide heads.
    unfi = repo.unfiltered()
    unficl = unfi.changelog

    def headsofdiff(h1, h2):
        """Returns heads(h1 % h2)"""
        res = unfi.set('heads(%ln %% %ln)', h1, h2)
        return set(ctx.node() for ctx in res)

    def headsofunion(h1, h2):
        """Returns heads((h1 + h2) - null)"""
        res = unfi.set('heads((%ln + %ln - null))', h1, h2)
        return set(ctx.node() for ctx in res)

    while True:
        previousheads = unficl.heads()
        clstart = len(unficl)
        _pullbundle2(pullop)
        if changegroup.NARROW_REQUIREMENT in repo.requirements:
            # XXX narrow clones filter the heads on the server side during
            # XXX getbundle and result in partial replies as well.
            # XXX Disable pull bundles in this case as band aid to avoid
            # XXX extra round trips.
            break
        if clstart == len(unficl):
            # nothing arrived: we are done
            break
        if all(unficl.hasnode(n) for n in pullop.rheads):
            # every remote head is now known locally: we are done
            break
        newheads = headsofdiff(unficl.heads(), previousheads)
        pullop.common = headsofunion(newheads, pullop.common)
        pullop.rheads = set(pullop.rheads) - pullop.common
1440
1443
1441 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1444 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1442 streamclonerequested=None):
1445 streamclonerequested=None):
1443 """Fetch repository data from a remote.
1446 """Fetch repository data from a remote.
1444
1447
1445 This is the main function used to retrieve data from a remote repository.
1448 This is the main function used to retrieve data from a remote repository.
1446
1449
1447 ``repo`` is the local repository to clone into.
1450 ``repo`` is the local repository to clone into.
1448 ``remote`` is a peer instance.
1451 ``remote`` is a peer instance.
1449 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1452 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1450 default) means to pull everything from the remote.
1453 default) means to pull everything from the remote.
1451 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1454 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1452 default, all remote bookmarks are pulled.
1455 default, all remote bookmarks are pulled.
1453 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1456 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1454 initialization.
1457 initialization.
1455 ``streamclonerequested`` is a boolean indicating whether a "streaming
1458 ``streamclonerequested`` is a boolean indicating whether a "streaming
1456 clone" is requested. A "streaming clone" is essentially a raw file copy
1459 clone" is requested. A "streaming clone" is essentially a raw file copy
1457 of revlogs from the server. This only works when the local repository is
1460 of revlogs from the server. This only works when the local repository is
1458 empty. The default value of ``None`` means to respect the server
1461 empty. The default value of ``None`` means to respect the server
1459 configuration for preferring stream clones.
1462 configuration for preferring stream clones.
1460
1463
1461 Returns the ``pulloperation`` created for this pull.
1464 Returns the ``pulloperation`` created for this pull.
1462 """
1465 """
1463 if opargs is None:
1466 if opargs is None:
1464 opargs = {}
1467 opargs = {}
1465 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1468 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1466 streamclonerequested=streamclonerequested,
1469 streamclonerequested=streamclonerequested,
1467 **pycompat.strkwargs(opargs))
1470 **pycompat.strkwargs(opargs))
1468
1471
1469 peerlocal = pullop.remote.local()
1472 peerlocal = pullop.remote.local()
1470 if peerlocal:
1473 if peerlocal:
1471 missing = set(peerlocal.requirements) - pullop.repo.supported
1474 missing = set(peerlocal.requirements) - pullop.repo.supported
1472 if missing:
1475 if missing:
1473 msg = _("required features are not"
1476 msg = _("required features are not"
1474 " supported in the destination:"
1477 " supported in the destination:"
1475 " %s") % (', '.join(sorted(missing)))
1478 " %s") % (', '.join(sorted(missing)))
1476 raise error.Abort(msg)
1479 raise error.Abort(msg)
1477
1480
1478 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1481 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1479 with repo.wlock(), repo.lock(), pullop.trmanager:
1482 with repo.wlock(), repo.lock(), pullop.trmanager:
1480 # This should ideally be in _pullbundle2(). However, it needs to run
1483 # This should ideally be in _pullbundle2(). However, it needs to run
1481 # before discovery to avoid extra work.
1484 # before discovery to avoid extra work.
1482 _maybeapplyclonebundle(pullop)
1485 _maybeapplyclonebundle(pullop)
1483 streamclone.maybeperformlegacystreamclone(pullop)
1486 streamclone.maybeperformlegacystreamclone(pullop)
1484 _pulldiscovery(pullop)
1487 _pulldiscovery(pullop)
1485 if pullop.canusebundle2:
1488 if pullop.canusebundle2:
1486 _fullpullbundle2(repo, pullop)
1489 _fullpullbundle2(repo, pullop)
1487 _pullchangeset(pullop)
1490 _pullchangeset(pullop)
1488 _pullphase(pullop)
1491 _pullphase(pullop)
1489 _pullbookmarks(pullop)
1492 _pullbookmarks(pullop)
1490 _pullobsolete(pullop)
1493 _pullobsolete(pullop)
1491
1494
1492 # storing remotenames
1495 # storing remotenames
1493 if repo.ui.configbool('experimental', 'remotenames'):
1496 if repo.ui.configbool('experimental', 'remotenames'):
1494 logexchange.pullremotenames(repo, remote)
1497 logexchange.pullremotenames(repo, remote)
1495
1498
1496 return pullop
1499 return pullop
1497
1500
1498 # list of steps to perform discovery before pull
1501 # list of steps to perform discovery before pull
1499 pulldiscoveryorder = []
1502 pulldiscoveryorder = []
1500
1503
1501 # Mapping between step name and function
1504 # Mapping between step name and function
1502 #
1505 #
1503 # This exists to help extensions wrap steps if necessary
1506 # This exists to help extensions wrap steps if necessary
1504 pulldiscoverymapping = {}
1507 pulldiscoverymapping = {}
1505
1508
1506 def pulldiscovery(stepname):
1509 def pulldiscovery(stepname):
1507 """decorator for function performing discovery before pull
1510 """decorator for function performing discovery before pull
1508
1511
1509 The function is added to the step -> function mapping and appended to the
1512 The function is added to the step -> function mapping and appended to the
1510 list of steps. Beware that decorated function will be added in order (this
1513 list of steps. Beware that decorated function will be added in order (this
1511 may matter).
1514 may matter).
1512
1515
1513 You can only use this decorator for a new step, if you want to wrap a step
1516 You can only use this decorator for a new step, if you want to wrap a step
1514 from an extension, change the pulldiscovery dictionary directly."""
1517 from an extension, change the pulldiscovery dictionary directly."""
1515 def dec(func):
1518 def dec(func):
1516 assert stepname not in pulldiscoverymapping
1519 assert stepname not in pulldiscoverymapping
1517 pulldiscoverymapping[stepname] = func
1520 pulldiscoverymapping[stepname] = func
1518 pulldiscoveryorder.append(stepname)
1521 pulldiscoveryorder.append(stepname)
1519 return func
1522 return func
1520 return dec
1523 return dec
1521
1524
1522 def _pulldiscovery(pullop):
1525 def _pulldiscovery(pullop):
1523 """Run all discovery steps"""
1526 """Run all discovery steps"""
1524 for stepname in pulldiscoveryorder:
1527 for stepname in pulldiscoveryorder:
1525 step = pulldiscoverymapping[stepname]
1528 step = pulldiscoverymapping[stepname]
1526 step(pullop)
1529 step(pullop)
1527
1530
1528 @pulldiscovery('b1:bookmarks')
1531 @pulldiscovery('b1:bookmarks')
1529 def _pullbookmarkbundle1(pullop):
1532 def _pullbookmarkbundle1(pullop):
1530 """fetch bookmark data in bundle1 case
1533 """fetch bookmark data in bundle1 case
1531
1534
1532 If not using bundle2, we have to fetch bookmarks before changeset
1535 If not using bundle2, we have to fetch bookmarks before changeset
1533 discovery to reduce the chance and impact of race conditions."""
1536 discovery to reduce the chance and impact of race conditions."""
1534 if pullop.remotebookmarks is not None:
1537 if pullop.remotebookmarks is not None:
1535 return
1538 return
1536 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1539 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1537 # all known bundle2 servers now support listkeys, but lets be nice with
1540 # all known bundle2 servers now support listkeys, but lets be nice with
1538 # new implementation.
1541 # new implementation.
1539 return
1542 return
1540 books = listkeys(pullop.remote, 'bookmarks')
1543 books = listkeys(pullop.remote, 'bookmarks')
1541 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1544 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1542
1545
1543
1546
1544 @pulldiscovery('changegroup')
1547 @pulldiscovery('changegroup')
1545 def _pulldiscoverychangegroup(pullop):
1548 def _pulldiscoverychangegroup(pullop):
1546 """discovery phase for the pull
1549 """discovery phase for the pull
1547
1550
1548 Current handle changeset discovery only, will change handle all discovery
1551 Current handle changeset discovery only, will change handle all discovery
1549 at some point."""
1552 at some point."""
1550 tmp = discovery.findcommonincoming(pullop.repo,
1553 tmp = discovery.findcommonincoming(pullop.repo,
1551 pullop.remote,
1554 pullop.remote,
1552 heads=pullop.heads,
1555 heads=pullop.heads,
1553 force=pullop.force)
1556 force=pullop.force)
1554 common, fetch, rheads = tmp
1557 common, fetch, rheads = tmp
1555 nm = pullop.repo.unfiltered().changelog.nodemap
1558 nm = pullop.repo.unfiltered().changelog.nodemap
1556 if fetch and rheads:
1559 if fetch and rheads:
1557 # If a remote heads is filtered locally, put in back in common.
1560 # If a remote heads is filtered locally, put in back in common.
1558 #
1561 #
1559 # This is a hackish solution to catch most of "common but locally
1562 # This is a hackish solution to catch most of "common but locally
1560 # hidden situation". We do not performs discovery on unfiltered
1563 # hidden situation". We do not performs discovery on unfiltered
1561 # repository because it end up doing a pathological amount of round
1564 # repository because it end up doing a pathological amount of round
1562 # trip for w huge amount of changeset we do not care about.
1565 # trip for w huge amount of changeset we do not care about.
1563 #
1566 #
1564 # If a set of such "common but filtered" changeset exist on the server
1567 # If a set of such "common but filtered" changeset exist on the server
1565 # but are not including a remote heads, we'll not be able to detect it,
1568 # but are not including a remote heads, we'll not be able to detect it,
1566 scommon = set(common)
1569 scommon = set(common)
1567 for n in rheads:
1570 for n in rheads:
1568 if n in nm:
1571 if n in nm:
1569 if n not in scommon:
1572 if n not in scommon:
1570 common.append(n)
1573 common.append(n)
1571 if set(rheads).issubset(set(common)):
1574 if set(rheads).issubset(set(common)):
1572 fetch = []
1575 fetch = []
1573 pullop.common = common
1576 pullop.common = common
1574 pullop.fetch = fetch
1577 pullop.fetch = fetch
1575 pullop.rheads = rheads
1578 pullop.rheads = rheads
1576
1579
1577 def _pullbundle2(pullop):
1580 def _pullbundle2(pullop):
1578 """pull data using bundle2
1581 """pull data using bundle2
1579
1582
1580 For now, the only supported data are changegroup."""
1583 For now, the only supported data are changegroup."""
1581 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1584 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1582
1585
1583 # make ui easier to access
1586 # make ui easier to access
1584 ui = pullop.repo.ui
1587 ui = pullop.repo.ui
1585
1588
1586 # At the moment we don't do stream clones over bundle2. If that is
1589 # At the moment we don't do stream clones over bundle2. If that is
1587 # implemented then here's where the check for that will go.
1590 # implemented then here's where the check for that will go.
1588 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1591 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1589
1592
1590 # declare pull perimeters
1593 # declare pull perimeters
1591 kwargs['common'] = pullop.common
1594 kwargs['common'] = pullop.common
1592 kwargs['heads'] = pullop.heads or pullop.rheads
1595 kwargs['heads'] = pullop.heads or pullop.rheads
1593
1596
1594 if streaming:
1597 if streaming:
1595 kwargs['cg'] = False
1598 kwargs['cg'] = False
1596 kwargs['stream'] = True
1599 kwargs['stream'] = True
1597 pullop.stepsdone.add('changegroup')
1600 pullop.stepsdone.add('changegroup')
1598 pullop.stepsdone.add('phases')
1601 pullop.stepsdone.add('phases')
1599
1602
1600 else:
1603 else:
1601 # pulling changegroup
1604 # pulling changegroup
1602 pullop.stepsdone.add('changegroup')
1605 pullop.stepsdone.add('changegroup')
1603
1606
1604 kwargs['cg'] = pullop.fetch
1607 kwargs['cg'] = pullop.fetch
1605
1608
1606 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1609 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1607 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1610 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1608 if (not legacyphase and hasbinaryphase):
1611 if (not legacyphase and hasbinaryphase):
1609 kwargs['phases'] = True
1612 kwargs['phases'] = True
1610 pullop.stepsdone.add('phases')
1613 pullop.stepsdone.add('phases')
1611
1614
1612 if 'listkeys' in pullop.remotebundle2caps:
1615 if 'listkeys' in pullop.remotebundle2caps:
1613 if 'phases' not in pullop.stepsdone:
1616 if 'phases' not in pullop.stepsdone:
1614 kwargs['listkeys'] = ['phases']
1617 kwargs['listkeys'] = ['phases']
1615
1618
1616 bookmarksrequested = False
1619 bookmarksrequested = False
1617 legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
1620 legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
1618 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1621 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1619
1622
1620 if pullop.remotebookmarks is not None:
1623 if pullop.remotebookmarks is not None:
1621 pullop.stepsdone.add('request-bookmarks')
1624 pullop.stepsdone.add('request-bookmarks')
1622
1625
1623 if ('request-bookmarks' not in pullop.stepsdone
1626 if ('request-bookmarks' not in pullop.stepsdone
1624 and pullop.remotebookmarks is None
1627 and pullop.remotebookmarks is None
1625 and not legacybookmark and hasbinarybook):
1628 and not legacybookmark and hasbinarybook):
1626 kwargs['bookmarks'] = True
1629 kwargs['bookmarks'] = True
1627 bookmarksrequested = True
1630 bookmarksrequested = True
1628
1631
1629 if 'listkeys' in pullop.remotebundle2caps:
1632 if 'listkeys' in pullop.remotebundle2caps:
1630 if 'request-bookmarks' not in pullop.stepsdone:
1633 if 'request-bookmarks' not in pullop.stepsdone:
1631 # make sure to always includes bookmark data when migrating
1634 # make sure to always includes bookmark data when migrating
1632 # `hg incoming --bundle` to using this function.
1635 # `hg incoming --bundle` to using this function.
1633 pullop.stepsdone.add('request-bookmarks')
1636 pullop.stepsdone.add('request-bookmarks')
1634 kwargs.setdefault('listkeys', []).append('bookmarks')
1637 kwargs.setdefault('listkeys', []).append('bookmarks')
1635
1638
1636 # If this is a full pull / clone and the server supports the clone bundles
1639 # If this is a full pull / clone and the server supports the clone bundles
1637 # feature, tell the server whether we attempted a clone bundle. The
1640 # feature, tell the server whether we attempted a clone bundle. The
1638 # presence of this flag indicates the client supports clone bundles. This
1641 # presence of this flag indicates the client supports clone bundles. This
1639 # will enable the server to treat clients that support clone bundles
1642 # will enable the server to treat clients that support clone bundles
1640 # differently from those that don't.
1643 # differently from those that don't.
1641 if (pullop.remote.capable('clonebundles')
1644 if (pullop.remote.capable('clonebundles')
1642 and pullop.heads is None and list(pullop.common) == [nullid]):
1645 and pullop.heads is None and list(pullop.common) == [nullid]):
1643 kwargs['cbattempted'] = pullop.clonebundleattempted
1646 kwargs['cbattempted'] = pullop.clonebundleattempted
1644
1647
1645 if streaming:
1648 if streaming:
1646 pullop.repo.ui.status(_('streaming all changes\n'))
1649 pullop.repo.ui.status(_('streaming all changes\n'))
1647 elif not pullop.fetch:
1650 elif not pullop.fetch:
1648 pullop.repo.ui.status(_("no changes found\n"))
1651 pullop.repo.ui.status(_("no changes found\n"))
1649 pullop.cgresult = 0
1652 pullop.cgresult = 0
1650 else:
1653 else:
1651 if pullop.heads is None and list(pullop.common) == [nullid]:
1654 if pullop.heads is None and list(pullop.common) == [nullid]:
1652 pullop.repo.ui.status(_("requesting all changes\n"))
1655 pullop.repo.ui.status(_("requesting all changes\n"))
1653 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1656 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1654 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1657 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1655 if obsolete.commonversion(remoteversions) is not None:
1658 if obsolete.commonversion(remoteversions) is not None:
1656 kwargs['obsmarkers'] = True
1659 kwargs['obsmarkers'] = True
1657 pullop.stepsdone.add('obsmarkers')
1660 pullop.stepsdone.add('obsmarkers')
1658 _pullbundle2extraprepare(pullop, kwargs)
1661 _pullbundle2extraprepare(pullop, kwargs)
1659
1662
1660 with pullop.remote.commandexecutor() as e:
1663 with pullop.remote.commandexecutor() as e:
1661 args = dict(kwargs)
1664 args = dict(kwargs)
1662 args['source'] = 'pull'
1665 args['source'] = 'pull'
1663 bundle = e.callcommand('getbundle', args).result()
1666 bundle = e.callcommand('getbundle', args).result()
1664
1667
1665 try:
1668 try:
1666 op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction,
1669 op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction,
1667 source='pull')
1670 source='pull')
1668 op.modes['bookmarks'] = 'records'
1671 op.modes['bookmarks'] = 'records'
1669 bundle2.processbundle(pullop.repo, bundle, op=op)
1672 bundle2.processbundle(pullop.repo, bundle, op=op)
1670 except bundle2.AbortFromPart as exc:
1673 except bundle2.AbortFromPart as exc:
1671 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1674 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1672 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1675 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1673 except error.BundleValueError as exc:
1676 except error.BundleValueError as exc:
1674 raise error.Abort(_('missing support for %s') % exc)
1677 raise error.Abort(_('missing support for %s') % exc)
1675
1678
1676 if pullop.fetch:
1679 if pullop.fetch:
1677 pullop.cgresult = bundle2.combinechangegroupresults(op)
1680 pullop.cgresult = bundle2.combinechangegroupresults(op)
1678
1681
1679 # processing phases change
1682 # processing phases change
1680 for namespace, value in op.records['listkeys']:
1683 for namespace, value in op.records['listkeys']:
1681 if namespace == 'phases':
1684 if namespace == 'phases':
1682 _pullapplyphases(pullop, value)
1685 _pullapplyphases(pullop, value)
1683
1686
1684 # processing bookmark update
1687 # processing bookmark update
1685 if bookmarksrequested:
1688 if bookmarksrequested:
1686 books = {}
1689 books = {}
1687 for record in op.records['bookmarks']:
1690 for record in op.records['bookmarks']:
1688 books[record['bookmark']] = record["node"]
1691 books[record['bookmark']] = record["node"]
1689 pullop.remotebookmarks = books
1692 pullop.remotebookmarks = books
1690 else:
1693 else:
1691 for namespace, value in op.records['listkeys']:
1694 for namespace, value in op.records['listkeys']:
1692 if namespace == 'bookmarks':
1695 if namespace == 'bookmarks':
1693 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
1696 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
1694
1697
1695 # bookmark data were either already there or pulled in the bundle
1698 # bookmark data were either already there or pulled in the bundle
1696 if pullop.remotebookmarks is not None:
1699 if pullop.remotebookmarks is not None:
1697 _pullbookmarks(pullop)
1700 _pullbookmarks(pullop)
1698
1701
1699 def _pullbundle2extraprepare(pullop, kwargs):
1702 def _pullbundle2extraprepare(pullop, kwargs):
1700 """hook function so that extensions can extend the getbundle call"""
1703 """hook function so that extensions can extend the getbundle call"""
1701
1704
1702 def _pullchangeset(pullop):
1705 def _pullchangeset(pullop):
1703 """pull changeset from unbundle into the local repo"""
1706 """pull changeset from unbundle into the local repo"""
1704 # We delay the open of the transaction as late as possible so we
1707 # We delay the open of the transaction as late as possible so we
1705 # don't open transaction for nothing or you break future useful
1708 # don't open transaction for nothing or you break future useful
1706 # rollback call
1709 # rollback call
1707 if 'changegroup' in pullop.stepsdone:
1710 if 'changegroup' in pullop.stepsdone:
1708 return
1711 return
1709 pullop.stepsdone.add('changegroup')
1712 pullop.stepsdone.add('changegroup')
1710 if not pullop.fetch:
1713 if not pullop.fetch:
1711 pullop.repo.ui.status(_("no changes found\n"))
1714 pullop.repo.ui.status(_("no changes found\n"))
1712 pullop.cgresult = 0
1715 pullop.cgresult = 0
1713 return
1716 return
1714 tr = pullop.gettransaction()
1717 tr = pullop.gettransaction()
1715 if pullop.heads is None and list(pullop.common) == [nullid]:
1718 if pullop.heads is None and list(pullop.common) == [nullid]:
1716 pullop.repo.ui.status(_("requesting all changes\n"))
1719 pullop.repo.ui.status(_("requesting all changes\n"))
1717 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1720 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1718 # issue1320, avoid a race if remote changed after discovery
1721 # issue1320, avoid a race if remote changed after discovery
1719 pullop.heads = pullop.rheads
1722 pullop.heads = pullop.rheads
1720
1723
1721 if pullop.remote.capable('getbundle'):
1724 if pullop.remote.capable('getbundle'):
1722 # TODO: get bundlecaps from remote
1725 # TODO: get bundlecaps from remote
1723 cg = pullop.remote.getbundle('pull', common=pullop.common,
1726 cg = pullop.remote.getbundle('pull', common=pullop.common,
1724 heads=pullop.heads or pullop.rheads)
1727 heads=pullop.heads or pullop.rheads)
1725 elif pullop.heads is None:
1728 elif pullop.heads is None:
1726 with pullop.remote.commandexecutor() as e:
1729 with pullop.remote.commandexecutor() as e:
1727 cg = e.callcommand('changegroup', {
1730 cg = e.callcommand('changegroup', {
1728 'nodes': pullop.fetch,
1731 'nodes': pullop.fetch,
1729 'source': 'pull',
1732 'source': 'pull',
1730 }).result()
1733 }).result()
1731
1734
1732 elif not pullop.remote.capable('changegroupsubset'):
1735 elif not pullop.remote.capable('changegroupsubset'):
1733 raise error.Abort(_("partial pull cannot be done because "
1736 raise error.Abort(_("partial pull cannot be done because "
1734 "other repository doesn't support "
1737 "other repository doesn't support "
1735 "changegroupsubset."))
1738 "changegroupsubset."))
1736 else:
1739 else:
1737 with pullop.remote.commandexecutor() as e:
1740 with pullop.remote.commandexecutor() as e:
1738 cg = e.callcommand('changegroupsubset', {
1741 cg = e.callcommand('changegroupsubset', {
1739 'bases': pullop.fetch,
1742 'bases': pullop.fetch,
1740 'heads': pullop.heads,
1743 'heads': pullop.heads,
1741 'source': 'pull',
1744 'source': 'pull',
1742 }).result()
1745 }).result()
1743
1746
1744 bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
1747 bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
1745 pullop.remote.url())
1748 pullop.remote.url())
1746 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1749 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1747
1750
1748 def _pullphase(pullop):
1751 def _pullphase(pullop):
1749 # Get remote phases data from remote
1752 # Get remote phases data from remote
1750 if 'phases' in pullop.stepsdone:
1753 if 'phases' in pullop.stepsdone:
1751 return
1754 return
1752 remotephases = listkeys(pullop.remote, 'phases')
1755 remotephases = listkeys(pullop.remote, 'phases')
1753 _pullapplyphases(pullop, remotephases)
1756 _pullapplyphases(pullop, remotephases)
1754
1757
1755 def _pullapplyphases(pullop, remotephases):
1758 def _pullapplyphases(pullop, remotephases):
1756 """apply phase movement from observed remote state"""
1759 """apply phase movement from observed remote state"""
1757 if 'phases' in pullop.stepsdone:
1760 if 'phases' in pullop.stepsdone:
1758 return
1761 return
1759 pullop.stepsdone.add('phases')
1762 pullop.stepsdone.add('phases')
1760 publishing = bool(remotephases.get('publishing', False))
1763 publishing = bool(remotephases.get('publishing', False))
1761 if remotephases and not publishing:
1764 if remotephases and not publishing:
1762 # remote is new and non-publishing
1765 # remote is new and non-publishing
1763 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1766 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1764 pullop.pulledsubset,
1767 pullop.pulledsubset,
1765 remotephases)
1768 remotephases)
1766 dheads = pullop.pulledsubset
1769 dheads = pullop.pulledsubset
1767 else:
1770 else:
1768 # Remote is old or publishing all common changesets
1771 # Remote is old or publishing all common changesets
1769 # should be seen as public
1772 # should be seen as public
1770 pheads = pullop.pulledsubset
1773 pheads = pullop.pulledsubset
1771 dheads = []
1774 dheads = []
1772 unfi = pullop.repo.unfiltered()
1775 unfi = pullop.repo.unfiltered()
1773 phase = unfi._phasecache.phase
1776 phase = unfi._phasecache.phase
1774 rev = unfi.changelog.nodemap.get
1777 rev = unfi.changelog.nodemap.get
1775 public = phases.public
1778 public = phases.public
1776 draft = phases.draft
1779 draft = phases.draft
1777
1780
1778 # exclude changesets already public locally and update the others
1781 # exclude changesets already public locally and update the others
1779 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1782 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1780 if pheads:
1783 if pheads:
1781 tr = pullop.gettransaction()
1784 tr = pullop.gettransaction()
1782 phases.advanceboundary(pullop.repo, tr, public, pheads)
1785 phases.advanceboundary(pullop.repo, tr, public, pheads)
1783
1786
1784 # exclude changesets already draft locally and update the others
1787 # exclude changesets already draft locally and update the others
1785 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1788 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1786 if dheads:
1789 if dheads:
1787 tr = pullop.gettransaction()
1790 tr = pullop.gettransaction()
1788 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1791 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1789
1792
1790 def _pullbookmarks(pullop):
1793 def _pullbookmarks(pullop):
1791 """process the remote bookmark information to update the local one"""
1794 """process the remote bookmark information to update the local one"""
1792 if 'bookmarks' in pullop.stepsdone:
1795 if 'bookmarks' in pullop.stepsdone:
1793 return
1796 return
1794 pullop.stepsdone.add('bookmarks')
1797 pullop.stepsdone.add('bookmarks')
1795 repo = pullop.repo
1798 repo = pullop.repo
1796 remotebookmarks = pullop.remotebookmarks
1799 remotebookmarks = pullop.remotebookmarks
1797 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1800 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1798 pullop.remote.url(),
1801 pullop.remote.url(),
1799 pullop.gettransaction,
1802 pullop.gettransaction,
1800 explicit=pullop.explicitbookmarks)
1803 explicit=pullop.explicitbookmarks)
1801
1804
1802 def _pullobsolete(pullop):
1805 def _pullobsolete(pullop):
1803 """utility function to pull obsolete markers from a remote
1806 """utility function to pull obsolete markers from a remote
1804
1807
1805 The `gettransaction` is function that return the pull transaction, creating
1808 The `gettransaction` is function that return the pull transaction, creating
1806 one if necessary. We return the transaction to inform the calling code that
1809 one if necessary. We return the transaction to inform the calling code that
1807 a new transaction have been created (when applicable).
1810 a new transaction have been created (when applicable).
1808
1811
1809 Exists mostly to allow overriding for experimentation purpose"""
1812 Exists mostly to allow overriding for experimentation purpose"""
1810 if 'obsmarkers' in pullop.stepsdone:
1813 if 'obsmarkers' in pullop.stepsdone:
1811 return
1814 return
1812 pullop.stepsdone.add('obsmarkers')
1815 pullop.stepsdone.add('obsmarkers')
1813 tr = None
1816 tr = None
1814 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1817 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1815 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1818 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1816 remoteobs = listkeys(pullop.remote, 'obsolete')
1819 remoteobs = listkeys(pullop.remote, 'obsolete')
1817 if 'dump0' in remoteobs:
1820 if 'dump0' in remoteobs:
1818 tr = pullop.gettransaction()
1821 tr = pullop.gettransaction()
1819 markers = []
1822 markers = []
1820 for key in sorted(remoteobs, reverse=True):
1823 for key in sorted(remoteobs, reverse=True):
1821 if key.startswith('dump'):
1824 if key.startswith('dump'):
1822 data = util.b85decode(remoteobs[key])
1825 data = util.b85decode(remoteobs[key])
1823 version, newmarks = obsolete._readmarkers(data)
1826 version, newmarks = obsolete._readmarkers(data)
1824 markers += newmarks
1827 markers += newmarks
1825 if markers:
1828 if markers:
1826 pullop.repo.obsstore.add(tr, markers)
1829 pullop.repo.obsstore.add(tr, markers)
1827 pullop.repo.invalidatevolatilesets()
1830 pullop.repo.invalidatevolatilesets()
1828 return tr
1831 return tr
1829
1832
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle

    The repository's bundle2 capabilities are encoded and url-quoted into a
    single 'bundle2=' entry next to the 'HG20' marker.
    """
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    return {'HG20', 'bundle2=' + urlreq.quote(capsblob)}
1836
1839
# Ordered list of step names used when building a bundle2 for getbundle.
# Parts are emitted in this order, which can matter.
getbundle2partsorder = []

# Step name -> part-generating function.
#
# Kept module-level so extensions can wrap individual steps if necessary.
getbundle2partsmapping = {}
1844
1847
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is recorded in the step -> function mapping and
    in the ordered list of steps: appended by default, or inserted at
    position ``idx`` when given. Beware that registration order may matter.

    You can only use this decorator for new steps; to wrap a step from an
    extension, attack the getbundle2partsmapping dictionary directly."""
    def register(fn):
        # each step name may only be declared once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = fn
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return fn
    return register
1863
1866
def bundle2requested(bundlecaps):
    """Tell whether the client capabilities ask for a bundle2 stream.

    Any 'HG2x' entry in ``bundlecaps`` signals bundle2 support; ``None``
    (no capabilities advertised) means bundle1."""
    if bundlecaps is None:
        return False
    return any(cap.startswith('HG2') for cap in bundlecaps)
1868
1871
def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
                    **kwargs):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    if not bundle2requested(bundlecaps):
        # bundle10 case: a bare changegroup stream, no extra arguments
        if bundlecaps and not kwargs.get('cg', True):
            raise ValueError(_('request for bundle10 must include changegroup'))

        if kwargs:
            raise ValueError(_('unsupported getbundle arguments: %s')
                             % ', '.join(sorted(kwargs.keys())))
        outgoing = _computeoutgoing(repo, heads, common)
        info['bundleversion'] = 1
        return info, changegroup.makestream(repo, outgoing, '01', source,
                                            bundlecaps=bundlecaps)

    # bundle20 case: decode the client's advertised bundle2 capabilities
    info['bundleversion'] = 2
    b2caps = {}
    prefix = 'bundle2='
    for cap in bundlecaps:
        if cap.startswith(prefix):
            blob = urlreq.unquote(cap[len(prefix):])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs['heads'] = heads
    kwargs['common'] = common

    # emit every registered part, in registration order
    for stepname in getbundle2partsorder:
        partgen = getbundle2partsmapping[stepname]
        partgen(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
                **pycompat.strkwargs(kwargs))

    info['prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()
1915
1918
@getbundle2partsgenerator('stream2')
def _getbundlestream2(bundler, repo, *args, **kwargs):
    """add a stream v2 part to the requested bundle, when applicable"""
    # bundle2 decides on its own whether a stream part can be emitted
    return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
1919
1922
@getbundle2partsgenerator('changegroup')
def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
                              b2caps=None, heads=None, common=None, **kwargs):
    """add a changegroup part to the requested bundle"""
    if not kwargs.get(r'cg', True):
        return

    # pick the highest changegroup version both sides understand
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(repo)]
        if not cgversions:
            raise ValueError(_('no common changegroup version'))
        version = max(cgversions)

    outgoing = _computeoutgoing(repo, heads, common)
    if not outgoing.missing:
        # nothing to transfer, no part to emit
        return

    cgstream = changegroup.makestream(repo, outgoing, version, source,
                                      bundlecaps=bundlecaps)
    part = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        part.addparam('version', version)
    part.addparam('nbchanges', '%d' % len(outgoing.missing),
                  mandatory=False)
    if 'treemanifest' in repo.requirements:
        part.addparam('treemanifest', '1')
1948
1951
@getbundle2partsgenerator('bookmarks')
def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
                           b2caps=None, **kwargs):
    """add a bookmark part to the requested bundle"""
    if not kwargs.get(r'bookmarks', False):
        return
    if 'bookmarks' not in b2caps:
        raise ValueError(_('no common bookmarks exchange method'))
    # binary-encode the full bookmark listing; skip the part when empty
    data = bookmod.binaryencode(bookmod.listbinbookmarks(repo))
    if data:
        bundler.newpart('bookmarks', data=data)
1961
1964
@getbundle2partsgenerator('listkeys')
def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
    """add parts containing listkeys namespaces to the requested bundle"""
    # one 'listkeys' part per requested namespace
    for namespace in kwargs.get(r'listkeys', ()):
        part = bundler.newpart('listkeys')
        part.addparam('namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
1972
1975
@getbundle2partsgenerator('obsmarkers')
def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, heads=None, **kwargs):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get(r'obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # only ship markers relevant to the ancestors of the requested heads
    subset = [c.node() for c in repo.set('::%ln', heads)]
    markers = sorted(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
1984
1987
@getbundle2partsgenerator('phases')
def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, **kwargs):
    """add phase heads part to the requested bundle

    Heads are bucketed by phase. On a publishing repository everything is
    public; otherwise draft heads are looked up in the phase cache, and the
    intermediate public heads reachable from them are added so the client
    can move phases correctly.

    Raises ValueError when the client does not support binary phase-heads
    exchange.
    """
    if kwargs.get(r'phases', False):
        # The 'phases' capability may be missing from b2caps entirely;
        # default to an empty tuple so such a client hits the intended
        # ValueError below instead of a TypeError from "'heads' in None".
        if 'heads' not in b2caps.get('phases', ()):
            raise ValueError(_('no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phases for now.
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = 'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        phasemapping = []
        for phase in phases.allphases:
            phasemapping.append(sorted(headsbyphase[phase]))

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart('phase-heads', data=phasedata)
2031
2034
@getbundle2partsgenerator('hgtagsfnodes')
def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
                         b2caps=None, heads=None, common=None,
                         **kwargs):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Skip unless changesets are being exchanged and the client supports it.
    if not kwargs.get(r'cg', True) or 'hgtagsfnodes' not in b2caps:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2051
2054
@getbundle2partsgenerator('cache:rev-branch-cache')
def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
                             b2caps=None, heads=None, common=None,
                             **kwargs):
    """Transfer the rev-branch-cache mapping

    The payload is a series of data related to each branch

    1) branch name length
    2) number of open heads
    3) number of closed heads
    4) open heads nodes
    5) closed heads nodes
    """
    # Skip unless changesets are being exchanged and the client supports it.
    if not (kwargs.get(r'cg', True) and 'rev-branch-cache' in b2caps):
        return
    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2073
2076
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    current = repo.heads()
    digest = hashlib.sha1(''.join(sorted(current))).digest()
    if their_heads == ['force']:
        return
    if their_heads == current or their_heads == ['hashed', digest]:
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise error.PushRaced('repository changed while %s - '
                          'please try again' % context)
2087
2090
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    This function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    ret = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
    if url.startswith(('remote:http:', 'remote:https:')):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call wil be a no-op
        check_heads(repo, heads, 'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = "\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                ret = bundle2.combinechangegroupresults(op)
        else:
            ret = None
            try:
                def gettransaction():
                    # open wlock/lock/transaction lazily, on first use
                    if not lockandtr[2]:
                        lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs['source'] = source
                        lockandtr[2].hookargs['url'] = url
                        lockandtr[2].hookargs['bundle2'] = '1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                    gettransaction()

                op = bundle2.bundleoperation(repo, gettransaction,
                                             captureoutput=captureoutput,
                                             source='push')
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    ret = op.reply
                    if captureoutput and ret is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)
                        def recordout(output):
                            ret.newpart('output', data=output, mandatory=False)
                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                exc.duringunbundle2 = True
                if captureoutput and ret is not None:
                    # salvage already-produced output parts onto the exception
                    parts = exc._bundle2salvagedoutput = ret.salvageoutput()
                    def recordout(output):
                        part = bundle2.bundlepart('output', data=output,
                                                  mandatory=False)
                        parts.append(part)
                raise
    finally:
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return ret
2161
2164
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""

    repo = pullop.repo
    remote = pullop.remote

    if not repo.ui.configbool('ui', 'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    if pullop.heads:
        return

    if not remote.capable('clonebundles'):
        return

    with remote.commandexecutor() as e:
        manifest = e.callcommand('clonebundles', {}).result()

    # Issuing the wire protocol command is good enough to record the attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, manifest)
    if not entries:
        repo.ui.note(_('no clone bundles available on remote; '
                       'falling back to regular clone\n'))
        return

    entries = filterclonebundleentries(
        repo, entries, streamclonerequested=pullop.streamclonerequested)

    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))
        repo.ui.warn(_('(you may want to report this to the server '
                       'operator)\n'))
        return

    entries = sortclonebundleentries(repo.ui, entries)

    url = entries[0]['URL']
    repo.ui.status(_('applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_('finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool('ui', 'clonebundlefallback'):
        repo.ui.warn(_('falling back to normal clone\n'))
    else:
        raise error.Abort(_('error applying bundle'),
                          hint=_('if this error persists, consider contacting '
                                 'the server operator or disable clone '
                                 'bundles via '
                                 '"--config ui.clonebundles=false"'))
2228
2231
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue
        attrs = {'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split('=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == 'BUNDLESPEC':
                try:
                    bundlespec = parsebundlespec(repo, value)
                    attrs['COMPRESSION'] = bundlespec.compression
                    attrs['VERSION'] = bundlespec.version
                except (error.InvalidBundleSpecification,
                        error.UnsupportedBundleSpecification):
                    # malformed specs are kept verbatim, just not decomposed
                    pass

        entries.append(attrs)

    return entries
2263
2266
def isstreamclonespec(bundlespec):
    """Report whether a parsed bundle spec describes a stream clone."""
    if bundlespec.wirecompression != 'UN':
        return False

    # Stream clone v1
    if bundlespec.wireversion == 's1':
        return True

    # Stream clone v2
    return bool(bundlespec.wireversion == '02'
                and bundlespec.contentopts.get('streamv2'))
2276
2279
2277 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2280 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2278 """Remove incompatible clone bundle manifest entries.
2281 """Remove incompatible clone bundle manifest entries.
2279
2282
2280 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2283 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2281 and returns a new list consisting of only the entries that this client
2284 and returns a new list consisting of only the entries that this client
2282 should be able to apply.
2285 should be able to apply.
2283
2286
2284 There is no guarantee we'll be able to apply all returned entries because
2287 There is no guarantee we'll be able to apply all returned entries because
2285 the metadata we use to filter on may be missing or wrong.
2288 the metadata we use to filter on may be missing or wrong.
2286 """
2289 """
2287 newentries = []
2290 newentries = []
2288 for entry in entries:
2291 for entry in entries:
2289 spec = entry.get('BUNDLESPEC')
2292 spec = entry.get('BUNDLESPEC')
2290 if spec:
2293 if spec:
2291 try:
2294 try:
2292 bundlespec = parsebundlespec(repo, spec, strict=True)
2295 bundlespec = parsebundlespec(repo, spec, strict=True)
2293
2296
2294 # If a stream clone was requested, filter out non-streamclone
2297 # If a stream clone was requested, filter out non-streamclone
2295 # entries.
2298 # entries.
2296 if streamclonerequested and not isstreamclonespec(bundlespec):
2299 if streamclonerequested and not isstreamclonespec(bundlespec):
2297 repo.ui.debug('filtering %s because not a stream clone\n' %
2300 repo.ui.debug('filtering %s because not a stream clone\n' %
2298 entry['URL'])
2301 entry['URL'])
2299 continue
2302 continue
2300
2303
2301 except error.InvalidBundleSpecification as e:
2304 except error.InvalidBundleSpecification as e:
2302 repo.ui.debug(stringutil.forcebytestr(e) + '\n')
2305 repo.ui.debug(stringutil.forcebytestr(e) + '\n')
2303 continue
2306 continue
2304 except error.UnsupportedBundleSpecification as e:
2307 except error.UnsupportedBundleSpecification as e:
2305 repo.ui.debug('filtering %s because unsupported bundle '
2308 repo.ui.debug('filtering %s because unsupported bundle '
2306 'spec: %s\n' % (
2309 'spec: %s\n' % (
2307 entry['URL'], stringutil.forcebytestr(e)))
2310 entry['URL'], stringutil.forcebytestr(e)))
2308 continue
2311 continue
2309 # If we don't have a spec and requested a stream clone, we don't know
2312 # If we don't have a spec and requested a stream clone, we don't know
2310 # what the entry is so don't attempt to apply it.
2313 # what the entry is so don't attempt to apply it.
2311 elif streamclonerequested:
2314 elif streamclonerequested:
2312 repo.ui.debug('filtering %s because cannot determine if a stream '
2315 repo.ui.debug('filtering %s because cannot determine if a stream '
2313 'clone bundle\n' % entry['URL'])
2316 'clone bundle\n' % entry['URL'])
2314 continue
2317 continue
2315
2318
2316 if 'REQUIRESNI' in entry and not sslutil.hassni:
2319 if 'REQUIRESNI' in entry and not sslutil.hassni:
2317 repo.ui.debug('filtering %s because SNI not supported\n' %
2320 repo.ui.debug('filtering %s because SNI not supported\n' %
2318 entry['URL'])
2321 entry['URL'])
2319 continue
2322 continue
2320
2323
2321 newentries.append(entry)
2324 newentries.append(entry)
2322
2325
2323 return newentries
2326 return newentries
2324
2327
2325 class clonebundleentry(object):
2328 class clonebundleentry(object):
2326 """Represents an item in a clone bundles manifest.
2329 """Represents an item in a clone bundles manifest.
2327
2330
2328 This rich class is needed to support sorting since sorted() in Python 3
2331 This rich class is needed to support sorting since sorted() in Python 3
2329 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
2332 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
2330 won't work.
2333 won't work.
2331 """
2334 """
2332
2335
2333 def __init__(self, value, prefers):
2336 def __init__(self, value, prefers):
2334 self.value = value
2337 self.value = value
2335 self.prefers = prefers
2338 self.prefers = prefers
2336
2339
2337 def _cmp(self, other):
2340 def _cmp(self, other):
2338 for prefkey, prefvalue in self.prefers:
2341 for prefkey, prefvalue in self.prefers:
2339 avalue = self.value.get(prefkey)
2342 avalue = self.value.get(prefkey)
2340 bvalue = other.value.get(prefkey)
2343 bvalue = other.value.get(prefkey)
2341
2344
2342 # Special case for b missing attribute and a matches exactly.
2345 # Special case for b missing attribute and a matches exactly.
2343 if avalue is not None and bvalue is None and avalue == prefvalue:
2346 if avalue is not None and bvalue is None and avalue == prefvalue:
2344 return -1
2347 return -1
2345
2348
2346 # Special case for a missing attribute and b matches exactly.
2349 # Special case for a missing attribute and b matches exactly.
2347 if bvalue is not None and avalue is None and bvalue == prefvalue:
2350 if bvalue is not None and avalue is None and bvalue == prefvalue:
2348 return 1
2351 return 1
2349
2352
2350 # We can't compare unless attribute present on both.
2353 # We can't compare unless attribute present on both.
2351 if avalue is None or bvalue is None:
2354 if avalue is None or bvalue is None:
2352 continue
2355 continue
2353
2356
2354 # Same values should fall back to next attribute.
2357 # Same values should fall back to next attribute.
2355 if avalue == bvalue:
2358 if avalue == bvalue:
2356 continue
2359 continue
2357
2360
2358 # Exact matches come first.
2361 # Exact matches come first.
2359 if avalue == prefvalue:
2362 if avalue == prefvalue:
2360 return -1
2363 return -1
2361 if bvalue == prefvalue:
2364 if bvalue == prefvalue:
2362 return 1
2365 return 1
2363
2366
2364 # Fall back to next attribute.
2367 # Fall back to next attribute.
2365 continue
2368 continue
2366
2369
2367 # If we got here we couldn't sort by attributes and prefers. Fall
2370 # If we got here we couldn't sort by attributes and prefers. Fall
2368 # back to index order.
2371 # back to index order.
2369 return 0
2372 return 0
2370
2373
2371 def __lt__(self, other):
2374 def __lt__(self, other):
2372 return self._cmp(other) < 0
2375 return self._cmp(other) < 0
2373
2376
2374 def __gt__(self, other):
2377 def __gt__(self, other):
2375 return self._cmp(other) > 0
2378 return self._cmp(other) > 0
2376
2379
2377 def __eq__(self, other):
2380 def __eq__(self, other):
2378 return self._cmp(other) == 0
2381 return self._cmp(other) == 0
2379
2382
2380 def __le__(self, other):
2383 def __le__(self, other):
2381 return self._cmp(other) <= 0
2384 return self._cmp(other) <= 0
2382
2385
2383 def __ge__(self, other):
2386 def __ge__(self, other):
2384 return self._cmp(other) >= 0
2387 return self._cmp(other) >= 0
2385
2388
2386 def __ne__(self, other):
2389 def __ne__(self, other):
2387 return self._cmp(other) != 0
2390 return self._cmp(other) != 0
2388
2391
2389 def sortclonebundleentries(ui, entries):
2392 def sortclonebundleentries(ui, entries):
2390 prefers = ui.configlist('ui', 'clonebundleprefers')
2393 prefers = ui.configlist('ui', 'clonebundleprefers')
2391 if not prefers:
2394 if not prefers:
2392 return list(entries)
2395 return list(entries)
2393
2396
2394 prefers = [p.split('=', 1) for p in prefers]
2397 prefers = [p.split('=', 1) for p in prefers]
2395
2398
2396 items = sorted(clonebundleentry(v, prefers) for v in entries)
2399 items = sorted(clonebundleentry(v, prefers) for v in entries)
2397 return [i.value for i in items]
2400 return [i.value for i in items]
2398
2401
2399 def trypullbundlefromurl(ui, repo, url):
2402 def trypullbundlefromurl(ui, repo, url):
2400 """Attempt to apply a bundle from a URL."""
2403 """Attempt to apply a bundle from a URL."""
2401 with repo.lock(), repo.transaction('bundleurl') as tr:
2404 with repo.lock(), repo.transaction('bundleurl') as tr:
2402 try:
2405 try:
2403 fh = urlmod.open(ui, url)
2406 fh = urlmod.open(ui, url)
2404 cg = readbundle(ui, fh, 'stream')
2407 cg = readbundle(ui, fh, 'stream')
2405
2408
2406 if isinstance(cg, streamclone.streamcloneapplier):
2409 if isinstance(cg, streamclone.streamcloneapplier):
2407 cg.apply(repo)
2410 cg.apply(repo)
2408 else:
2411 else:
2409 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2412 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2410 return True
2413 return True
2411 except urlerr.httperror as e:
2414 except urlerr.httperror as e:
2412 ui.warn(_('HTTP error fetching bundle: %s\n') %
2415 ui.warn(_('HTTP error fetching bundle: %s\n') %
2413 stringutil.forcebytestr(e))
2416 stringutil.forcebytestr(e))
2414 except urlerr.urlerror as e:
2417 except urlerr.urlerror as e:
2415 ui.warn(_('error fetching bundle: %s\n') %
2418 ui.warn(_('error fetching bundle: %s\n') %
2416 stringutil.forcebytestr(e.reason))
2419 stringutil.forcebytestr(e.reason))
2417
2420
2418 return False
2421 return False
@@ -1,341 +1,397 b''
1 Testing the functionality to pull remotenames
1 Testing the functionality to pull remotenames
2 =============================================
2 =============================================
3
3
4 $ cat >> $HGRCPATH << EOF
4 $ cat >> $HGRCPATH << EOF
5 > [ui]
5 > [ui]
6 > ssh = $PYTHON "$TESTDIR/dummyssh"
6 > ssh = $PYTHON "$TESTDIR/dummyssh"
7 > [alias]
7 > [alias]
8 > glog = log -G -T '{rev}:{node|short} {desc}'
8 > glog = log -G -T '{rev}:{node|short} {desc}'
9 > [extensions]
9 > [extensions]
10 > remotenames =
10 > remotenames =
11 > show =
11 > show =
12 > EOF
12 > EOF
13
13
14 Making a server repo
14 Making a server repo
15 --------------------
15 --------------------
16
16
17 $ hg init server
17 $ hg init server
18 $ cd server
18 $ cd server
19 $ for ch in a b c d e f g h; do
19 $ for ch in a b c d e f g h; do
20 > echo "foo" >> $ch
20 > echo "foo" >> $ch
21 > hg ci -Aqm "Added "$ch
21 > hg ci -Aqm "Added "$ch
22 > done
22 > done
23 $ hg glog
23 $ hg glog
24 @ 7:ec2426147f0e Added h
24 @ 7:ec2426147f0e Added h
25 |
25 |
26 o 6:87d6d6676308 Added g
26 o 6:87d6d6676308 Added g
27 |
27 |
28 o 5:825660c69f0c Added f
28 o 5:825660c69f0c Added f
29 |
29 |
30 o 4:aa98ab95a928 Added e
30 o 4:aa98ab95a928 Added e
31 |
31 |
32 o 3:62615734edd5 Added d
32 o 3:62615734edd5 Added d
33 |
33 |
34 o 2:28ad74487de9 Added c
34 o 2:28ad74487de9 Added c
35 |
35 |
36 o 1:29becc82797a Added b
36 o 1:29becc82797a Added b
37 |
37 |
38 o 0:18d04c59bb5d Added a
38 o 0:18d04c59bb5d Added a
39
39
40 $ hg bookmark -r 3 foo
40 $ hg bookmark -r 3 foo
41 $ hg bookmark -r 6 bar
41 $ hg bookmark -r 6 bar
42 $ hg up 4
42 $ hg up 4
43 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
43 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
44 $ hg branch wat
44 $ hg branch wat
45 marked working directory as branch wat
45 marked working directory as branch wat
46 (branches are permanent and global, did you want a bookmark?)
46 (branches are permanent and global, did you want a bookmark?)
47 $ echo foo >> bar
47 $ echo foo >> bar
48 $ hg ci -Aqm "added bar"
48 $ hg ci -Aqm "added bar"
49
49
50 Making a client repo
50 Making a client repo
51 --------------------
51 --------------------
52
52
53 $ cd ..
53 $ cd ..
54
54
55 $ hg clone ssh://user@dummy/server client
55 $ hg clone ssh://user@dummy/server client
56 requesting all changes
56 requesting all changes
57 adding changesets
57 adding changesets
58 adding manifests
58 adding manifests
59 adding file changes
59 adding file changes
60 added 9 changesets with 9 changes to 9 files (+1 heads)
60 added 9 changesets with 9 changes to 9 files (+1 heads)
61 new changesets 18d04c59bb5d:3e1487808078
61 new changesets 18d04c59bb5d:3e1487808078
62 updating to branch default
62 updating to branch default
63 8 files updated, 0 files merged, 0 files removed, 0 files unresolved
63 8 files updated, 0 files merged, 0 files removed, 0 files unresolved
64
64
65 $ cd client
65 $ cd client
66 $ cat .hg/logexchange/bookmarks
66 $ cat .hg/logexchange/bookmarks
67 0
67 0
68
68
69 87d6d66763085b629e6d7ed56778c79827273022\x00default\x00bar (esc)
69 87d6d66763085b629e6d7ed56778c79827273022\x00default\x00bar (esc)
70 62615734edd52f06b6fb9c2beb429e4fe30d57b8\x00default\x00foo (esc)
70 62615734edd52f06b6fb9c2beb429e4fe30d57b8\x00default\x00foo (esc)
71
71
72 $ cat .hg/logexchange/branches
72 $ cat .hg/logexchange/branches
73 0
73 0
74
74
75 ec2426147f0e39dbc9cef599b066be6035ce691d\x00default\x00default (esc)
75 ec2426147f0e39dbc9cef599b066be6035ce691d\x00default\x00default (esc)
76 3e1487808078543b0af6d10dadf5d46943578db0\x00default\x00wat (esc)
76 3e1487808078543b0af6d10dadf5d46943578db0\x00default\x00wat (esc)
77
77
78 $ hg show work
78 $ hg show work
79 o 3e14 (wat) (default/wat) added bar
79 o 3e14 (wat) (default/wat) added bar
80 |
80 |
81 ~
81 ~
82 @ ec24 (default/default) Added h
82 @ ec24 (default/default) Added h
83 |
83 |
84 ~
84 ~
85
85
86 $ hg update "default/wat"
86 $ hg update "default/wat"
87 1 files updated, 0 files merged, 3 files removed, 0 files unresolved
87 1 files updated, 0 files merged, 3 files removed, 0 files unresolved
88 $ hg identify
88 $ hg identify
89 3e1487808078 (wat) tip
89 3e1487808078 (wat) tip
90
90
91 Making a new server
91 Making a new server
92 -------------------
92 -------------------
93
93
94 $ cd ..
94 $ cd ..
95 $ hg init server2
95 $ hg init server2
96 $ cd server2
96 $ cd server2
97 $ hg pull ../server/
97 $ hg pull ../server/
98 pulling from ../server/
98 pulling from ../server/
99 requesting all changes
99 requesting all changes
100 adding changesets
100 adding changesets
101 adding manifests
101 adding manifests
102 adding file changes
102 adding file changes
103 added 9 changesets with 9 changes to 9 files (+1 heads)
103 added 9 changesets with 9 changes to 9 files (+1 heads)
104 adding remote bookmark bar
104 adding remote bookmark bar
105 adding remote bookmark foo
105 adding remote bookmark foo
106 new changesets 18d04c59bb5d:3e1487808078
106 new changesets 18d04c59bb5d:3e1487808078
107 (run 'hg heads' to see heads)
107 (run 'hg heads' to see heads)
108
108
109 Pulling form the new server
109 Pulling form the new server
110 ---------------------------
110 ---------------------------
111 $ cd ../client/
111 $ cd ../client/
112 $ hg pull ../server2/
112 $ hg pull ../server2/
113 pulling from ../server2/
113 pulling from ../server2/
114 searching for changes
114 searching for changes
115 no changes found
115 no changes found
116 $ cat .hg/logexchange/bookmarks
116 $ cat .hg/logexchange/bookmarks
117 0
117 0
118
118
119 62615734edd52f06b6fb9c2beb429e4fe30d57b8\x00default\x00foo (esc)
119 62615734edd52f06b6fb9c2beb429e4fe30d57b8\x00default\x00foo (esc)
120 87d6d66763085b629e6d7ed56778c79827273022\x00default\x00bar (esc)
120 87d6d66763085b629e6d7ed56778c79827273022\x00default\x00bar (esc)
121 87d6d66763085b629e6d7ed56778c79827273022\x00$TESTTMP/server2\x00bar (esc)
121 87d6d66763085b629e6d7ed56778c79827273022\x00$TESTTMP/server2\x00bar (esc)
122 62615734edd52f06b6fb9c2beb429e4fe30d57b8\x00$TESTTMP/server2\x00foo (esc)
122 62615734edd52f06b6fb9c2beb429e4fe30d57b8\x00$TESTTMP/server2\x00foo (esc)
123
123
124 $ cat .hg/logexchange/branches
124 $ cat .hg/logexchange/branches
125 0
125 0
126
126
127 3e1487808078543b0af6d10dadf5d46943578db0\x00default\x00wat (esc)
127 3e1487808078543b0af6d10dadf5d46943578db0\x00default\x00wat (esc)
128 ec2426147f0e39dbc9cef599b066be6035ce691d\x00default\x00default (esc)
128 ec2426147f0e39dbc9cef599b066be6035ce691d\x00default\x00default (esc)
129 ec2426147f0e39dbc9cef599b066be6035ce691d\x00$TESTTMP/server2\x00default (esc)
129 ec2426147f0e39dbc9cef599b066be6035ce691d\x00$TESTTMP/server2\x00default (esc)
130 3e1487808078543b0af6d10dadf5d46943578db0\x00$TESTTMP/server2\x00wat (esc)
130 3e1487808078543b0af6d10dadf5d46943578db0\x00$TESTTMP/server2\x00wat (esc)
131
131
132 $ hg log -G
132 $ hg log -G
133 @ changeset: 8:3e1487808078
133 @ changeset: 8:3e1487808078
134 | branch: wat
134 | branch: wat
135 | tag: tip
135 | tag: tip
136 | remote branch: $TESTTMP/server2/wat
136 | remote branch: $TESTTMP/server2/wat
137 | remote branch: default/wat
137 | remote branch: default/wat
138 | parent: 4:aa98ab95a928
138 | parent: 4:aa98ab95a928
139 | user: test
139 | user: test
140 | date: Thu Jan 01 00:00:00 1970 +0000
140 | date: Thu Jan 01 00:00:00 1970 +0000
141 | summary: added bar
141 | summary: added bar
142 |
142 |
143 | o changeset: 7:ec2426147f0e
143 | o changeset: 7:ec2426147f0e
144 | | remote branch: $TESTTMP/server2/default
144 | | remote branch: $TESTTMP/server2/default
145 | | remote branch: default/default
145 | | remote branch: default/default
146 | | user: test
146 | | user: test
147 | | date: Thu Jan 01 00:00:00 1970 +0000
147 | | date: Thu Jan 01 00:00:00 1970 +0000
148 | | summary: Added h
148 | | summary: Added h
149 | |
149 | |
150 | o changeset: 6:87d6d6676308
150 | o changeset: 6:87d6d6676308
151 | | bookmark: bar
151 | | bookmark: bar
152 | | remote bookmark: $TESTTMP/server2/bar
152 | | remote bookmark: $TESTTMP/server2/bar
153 | | remote bookmark: default/bar
153 | | remote bookmark: default/bar
154 | | hoisted name: bar
154 | | hoisted name: bar
155 | | user: test
155 | | user: test
156 | | date: Thu Jan 01 00:00:00 1970 +0000
156 | | date: Thu Jan 01 00:00:00 1970 +0000
157 | | summary: Added g
157 | | summary: Added g
158 | |
158 | |
159 | o changeset: 5:825660c69f0c
159 | o changeset: 5:825660c69f0c
160 |/ user: test
160 |/ user: test
161 | date: Thu Jan 01 00:00:00 1970 +0000
161 | date: Thu Jan 01 00:00:00 1970 +0000
162 | summary: Added f
162 | summary: Added f
163 |
163 |
164 o changeset: 4:aa98ab95a928
164 o changeset: 4:aa98ab95a928
165 | user: test
165 | user: test
166 | date: Thu Jan 01 00:00:00 1970 +0000
166 | date: Thu Jan 01 00:00:00 1970 +0000
167 | summary: Added e
167 | summary: Added e
168 |
168 |
169 o changeset: 3:62615734edd5
169 o changeset: 3:62615734edd5
170 | bookmark: foo
170 | bookmark: foo
171 | remote bookmark: $TESTTMP/server2/foo
171 | remote bookmark: $TESTTMP/server2/foo
172 | remote bookmark: default/foo
172 | remote bookmark: default/foo
173 | hoisted name: foo
173 | hoisted name: foo
174 | user: test
174 | user: test
175 | date: Thu Jan 01 00:00:00 1970 +0000
175 | date: Thu Jan 01 00:00:00 1970 +0000
176 | summary: Added d
176 | summary: Added d
177 |
177 |
178 o changeset: 2:28ad74487de9
178 o changeset: 2:28ad74487de9
179 | user: test
179 | user: test
180 | date: Thu Jan 01 00:00:00 1970 +0000
180 | date: Thu Jan 01 00:00:00 1970 +0000
181 | summary: Added c
181 | summary: Added c
182 |
182 |
183 o changeset: 1:29becc82797a
183 o changeset: 1:29becc82797a
184 | user: test
184 | user: test
185 | date: Thu Jan 01 00:00:00 1970 +0000
185 | date: Thu Jan 01 00:00:00 1970 +0000
186 | summary: Added b
186 | summary: Added b
187 |
187 |
188 o changeset: 0:18d04c59bb5d
188 o changeset: 0:18d04c59bb5d
189 user: test
189 user: test
190 date: Thu Jan 01 00:00:00 1970 +0000
190 date: Thu Jan 01 00:00:00 1970 +0000
191 summary: Added a
191 summary: Added a
192
192
193 Testing the templates provided by remotenames extension
193 Testing the templates provided by remotenames extension
194
194
195 `remotenames` keyword
195 `remotenames` keyword
196
196
197 $ hg log -G -T "{rev}:{node|short} {remotenames}\n"
197 $ hg log -G -T "{rev}:{node|short} {remotenames}\n"
198 @ 8:3e1487808078 $TESTTMP/server2/wat default/wat
198 @ 8:3e1487808078 $TESTTMP/server2/wat default/wat
199 |
199 |
200 | o 7:ec2426147f0e $TESTTMP/server2/default default/default
200 | o 7:ec2426147f0e $TESTTMP/server2/default default/default
201 | |
201 | |
202 | o 6:87d6d6676308 $TESTTMP/server2/bar default/bar
202 | o 6:87d6d6676308 $TESTTMP/server2/bar default/bar
203 | |
203 | |
204 | o 5:825660c69f0c
204 | o 5:825660c69f0c
205 |/
205 |/
206 o 4:aa98ab95a928
206 o 4:aa98ab95a928
207 |
207 |
208 o 3:62615734edd5 $TESTTMP/server2/foo default/foo
208 o 3:62615734edd5 $TESTTMP/server2/foo default/foo
209 |
209 |
210 o 2:28ad74487de9
210 o 2:28ad74487de9
211 |
211 |
212 o 1:29becc82797a
212 o 1:29becc82797a
213 |
213 |
214 o 0:18d04c59bb5d
214 o 0:18d04c59bb5d
215
215
216 `remotebookmarks` and `remotebranches` keywords
216 `remotebookmarks` and `remotebranches` keywords
217
217
218 $ hg log -G -T "{rev}:{node|short} [{remotebookmarks}] ({remotebranches})"
218 $ hg log -G -T "{rev}:{node|short} [{remotebookmarks}] ({remotebranches})"
219 @ 8:3e1487808078 [] ($TESTTMP/server2/wat default/wat)
219 @ 8:3e1487808078 [] ($TESTTMP/server2/wat default/wat)
220 |
220 |
221 | o 7:ec2426147f0e [] ($TESTTMP/server2/default default/default)
221 | o 7:ec2426147f0e [] ($TESTTMP/server2/default default/default)
222 | |
222 | |
223 | o 6:87d6d6676308 [$TESTTMP/server2/bar default/bar] ()
223 | o 6:87d6d6676308 [$TESTTMP/server2/bar default/bar] ()
224 | |
224 | |
225 | o 5:825660c69f0c [] ()
225 | o 5:825660c69f0c [] ()
226 |/
226 |/
227 o 4:aa98ab95a928 [] ()
227 o 4:aa98ab95a928 [] ()
228 |
228 |
229 o 3:62615734edd5 [$TESTTMP/server2/foo default/foo] ()
229 o 3:62615734edd5 [$TESTTMP/server2/foo default/foo] ()
230 |
230 |
231 o 2:28ad74487de9 [] ()
231 o 2:28ad74487de9 [] ()
232 |
232 |
233 o 1:29becc82797a [] ()
233 o 1:29becc82797a [] ()
234 |
234 |
235 o 0:18d04c59bb5d [] ()
235 o 0:18d04c59bb5d [] ()
236
236
237 The `hoistednames` template keyword
237 The `hoistednames` template keyword
238
238
239 $ hg log -GT "{rev}:{node|short} ({hoistednames})"
239 $ hg log -GT "{rev}:{node|short} ({hoistednames})"
240 @ 8:3e1487808078 ()
240 @ 8:3e1487808078 ()
241 |
241 |
242 | o 7:ec2426147f0e ()
242 | o 7:ec2426147f0e ()
243 | |
243 | |
244 | o 6:87d6d6676308 (bar)
244 | o 6:87d6d6676308 (bar)
245 | |
245 | |
246 | o 5:825660c69f0c ()
246 | o 5:825660c69f0c ()
247 |/
247 |/
248 o 4:aa98ab95a928 ()
248 o 4:aa98ab95a928 ()
249 |
249 |
250 o 3:62615734edd5 (foo)
250 o 3:62615734edd5 (foo)
251 |
251 |
252 o 2:28ad74487de9 ()
252 o 2:28ad74487de9 ()
253 |
253 |
254 o 1:29becc82797a ()
254 o 1:29becc82797a ()
255 |
255 |
256 o 0:18d04c59bb5d ()
256 o 0:18d04c59bb5d ()
257
257
258
258
259 Testing the revsets provided by remotenames extension
259 Testing the revsets provided by remotenames extension
260
260
261 `remotenames` revset
261 `remotenames` revset
262
262
263 $ hg log -r "remotenames()" -GT "{rev}:{node|short} {remotenames}\n"
263 $ hg log -r "remotenames()" -GT "{rev}:{node|short} {remotenames}\n"
264 @ 8:3e1487808078 $TESTTMP/server2/wat default/wat
264 @ 8:3e1487808078 $TESTTMP/server2/wat default/wat
265 :
265 :
266 : o 7:ec2426147f0e $TESTTMP/server2/default default/default
266 : o 7:ec2426147f0e $TESTTMP/server2/default default/default
267 : |
267 : |
268 : o 6:87d6d6676308 $TESTTMP/server2/bar default/bar
268 : o 6:87d6d6676308 $TESTTMP/server2/bar default/bar
269 :/
269 :/
270 o 3:62615734edd5 $TESTTMP/server2/foo default/foo
270 o 3:62615734edd5 $TESTTMP/server2/foo default/foo
271 |
271 |
272 ~
272 ~
273
273
274 `remotebranches` revset
274 `remotebranches` revset
275
275
276 $ hg log -r "remotebranches()" -GT "{rev}:{node|short} {remotenames}\n"
276 $ hg log -r "remotebranches()" -GT "{rev}:{node|short} {remotenames}\n"
277 @ 8:3e1487808078 $TESTTMP/server2/wat default/wat
277 @ 8:3e1487808078 $TESTTMP/server2/wat default/wat
278 |
278 |
279 ~
279 ~
280 o 7:ec2426147f0e $TESTTMP/server2/default default/default
280 o 7:ec2426147f0e $TESTTMP/server2/default default/default
281 |
281 |
282 ~
282 ~
283
283
284 `remotebookmarks` revset
284 `remotebookmarks` revset
285
285
286 $ hg log -r "remotebookmarks()" -GT "{rev}:{node|short} {remotenames}\n"
286 $ hg log -r "remotebookmarks()" -GT "{rev}:{node|short} {remotenames}\n"
287 o 6:87d6d6676308 $TESTTMP/server2/bar default/bar
287 o 6:87d6d6676308 $TESTTMP/server2/bar default/bar
288 :
288 :
289 o 3:62615734edd5 $TESTTMP/server2/foo default/foo
289 o 3:62615734edd5 $TESTTMP/server2/foo default/foo
290 |
290 |
291 ~
291 ~
292
292
293 Updating to revision using hoisted name
293 Updating to revision using hoisted name
294
294
295 Deleting local bookmark to make sure we update to hoisted name only
295 Deleting local bookmark to make sure we update to hoisted name only
296
296
297 $ hg bookmark -d bar
297 $ hg bookmark -d bar
298
298
299 $ hg up bar
299 $ hg up bar
300 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
300 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
301
301
302 $ hg log -r .
302 $ hg log -r .
303 changeset: 6:87d6d6676308
303 changeset: 6:87d6d6676308
304 remote bookmark: $TESTTMP/server2/bar
304 remote bookmark: $TESTTMP/server2/bar
305 remote bookmark: default/bar
305 remote bookmark: default/bar
306 hoisted name: bar
306 hoisted name: bar
307 user: test
307 user: test
308 date: Thu Jan 01 00:00:00 1970 +0000
308 date: Thu Jan 01 00:00:00 1970 +0000
309 summary: Added g
309 summary: Added g
310
310
311 When both local bookmark and hoisted name exists but on different revs
311 When both local bookmark and hoisted name exists but on different revs
312
312
313 $ hg up 8
313 $ hg up 8
314 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
314 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
315
315
316 $ hg bookmark foo
316 $ hg bookmark foo
317 moving bookmark 'foo' forward from 62615734edd5
317 moving bookmark 'foo' forward from 62615734edd5
318
318
319 Local bookmark should take precedence over hoisted name
319 Local bookmark should take precedence over hoisted name
320
320
321 $ hg up foo
321 $ hg up foo
322 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
322 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
323
323
324 $ hg log -r .
324 $ hg log -r .
325 changeset: 8:3e1487808078
325 changeset: 8:3e1487808078
326 branch: wat
326 branch: wat
327 bookmark: foo
327 bookmark: foo
328 tag: tip
328 tag: tip
329 remote branch: $TESTTMP/server2/wat
329 remote branch: $TESTTMP/server2/wat
330 remote branch: default/wat
330 remote branch: default/wat
331 parent: 4:aa98ab95a928
331 parent: 4:aa98ab95a928
332 user: test
332 user: test
333 date: Thu Jan 01 00:00:00 1970 +0000
333 date: Thu Jan 01 00:00:00 1970 +0000
334 summary: added bar
334 summary: added bar
335
335
336 $ hg bookmarks
336 $ hg bookmarks
337 $TESTTMP/server2/bar 6:87d6d6676308
337 $TESTTMP/server2/bar 6:87d6d6676308
338 $TESTTMP/server2/foo 3:62615734edd5
338 $TESTTMP/server2/foo 3:62615734edd5
339 default/bar 6:87d6d6676308
339 default/bar 6:87d6d6676308
340 default/foo 3:62615734edd5
340 default/foo 3:62615734edd5
341 * foo 8:3e1487808078
341 * foo 8:3e1487808078
342
343 Testing the remotenames sychronization during `hg push`
344 -------------------------------------------------------
345
346 $ cd ../server/
347 $ hg bookmark foo
348 moving bookmark 'foo' forward from 62615734edd5
349
350 After the push, default/foo should move to rev 8
351 $ cd ../client/
352 $ hg push
353 pushing to ssh://user@dummy/server
354 searching for changes
355 no changes found
356 [1]
357 $ hg log -Gr 'remotenames()'
358 @ changeset: 8:3e1487808078
359 : branch: wat
360 : bookmark: foo
361 : tag: tip
362 : remote bookmark: default/foo
363 : hoisted name: foo
364 : remote branch: $TESTTMP/server2/wat
365 : remote branch: default/wat
366 : parent: 4:aa98ab95a928
367 : user: test
368 : date: Thu Jan 01 00:00:00 1970 +0000
369 : summary: added bar
370 :
371 : o changeset: 7:ec2426147f0e
372 : | remote branch: $TESTTMP/server2/default
373 : | remote branch: default/default
374 : | user: test
375 : | date: Thu Jan 01 00:00:00 1970 +0000
376 : | summary: Added h
377 : |
378 : o changeset: 6:87d6d6676308
379 :/ remote bookmark: $TESTTMP/server2/bar
380 : remote bookmark: default/bar
381 : hoisted name: bar
382 : user: test
383 : date: Thu Jan 01 00:00:00 1970 +0000
384 : summary: Added g
385 :
386 o changeset: 3:62615734edd5
387 | remote bookmark: $TESTTMP/server2/foo
388 ~ user: test
389 date: Thu Jan 01 00:00:00 1970 +0000
390 summary: Added d
391
392 $ hg bookmarks
393 $TESTTMP/server2/bar 6:87d6d6676308
394 $TESTTMP/server2/foo 3:62615734edd5
395 default/bar 6:87d6d6676308
396 default/foo 8:3e1487808078
397 * foo 8:3e1487808078
General Comments 0
You need to be logged in to leave comments. Login now