##// END OF EJS Templates
exchange: abort on pushing bookmarks pointing to secret changesets (issue6159)...
Navaneeth Suresh -
r43082:3332bde5 stable
parent child Browse files
Show More
@@ -1,2701 +1,2709 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import hashlib
11 import hashlib
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 bin,
15 bin,
16 hex,
16 hex,
17 nullid,
17 nullid,
18 nullrev,
18 nullrev,
19 )
19 )
20 from .thirdparty import (
20 from .thirdparty import (
21 attr,
21 attr,
22 )
22 )
23 from . import (
23 from . import (
24 bookmarks as bookmod,
24 bookmarks as bookmod,
25 bundle2,
25 bundle2,
26 changegroup,
26 changegroup,
27 discovery,
27 discovery,
28 error,
28 error,
29 exchangev2,
29 exchangev2,
30 lock as lockmod,
30 lock as lockmod,
31 logexchange,
31 logexchange,
32 narrowspec,
32 narrowspec,
33 obsolete,
33 obsolete,
34 phases,
34 phases,
35 pushkey,
35 pushkey,
36 pycompat,
36 pycompat,
37 repository,
37 repository,
38 scmutil,
38 scmutil,
39 sslutil,
39 sslutil,
40 streamclone,
40 streamclone,
41 url as urlmod,
41 url as urlmod,
42 util,
42 util,
43 wireprototypes,
43 wireprototypes,
44 )
44 )
45 from .utils import (
45 from .utils import (
46 stringutil,
46 stringutil,
47 )
47 )
48
48
49 urlerr = util.urlerr
49 urlerr = util.urlerr
50 urlreq = util.urlreq
50 urlreq = util.urlreq
51
51
52 _NARROWACL_SECTION = 'narrowacl'
52 _NARROWACL_SECTION = 'narrowacl'
53
53
54 # Maps bundle version human names to changegroup versions.
54 # Maps bundle version human names to changegroup versions.
55 _bundlespeccgversions = {'v1': '01',
55 _bundlespeccgversions = {'v1': '01',
56 'v2': '02',
56 'v2': '02',
57 'packed1': 's1',
57 'packed1': 's1',
58 'bundle2': '02', #legacy
58 'bundle2': '02', #legacy
59 }
59 }
60
60
61 # Maps bundle version with content opts to choose which part to bundle
61 # Maps bundle version with content opts to choose which part to bundle
62 _bundlespeccontentopts = {
62 _bundlespeccontentopts = {
63 'v1': {
63 'v1': {
64 'changegroup': True,
64 'changegroup': True,
65 'cg.version': '01',
65 'cg.version': '01',
66 'obsolescence': False,
66 'obsolescence': False,
67 'phases': False,
67 'phases': False,
68 'tagsfnodescache': False,
68 'tagsfnodescache': False,
69 'revbranchcache': False
69 'revbranchcache': False
70 },
70 },
71 'v2': {
71 'v2': {
72 'changegroup': True,
72 'changegroup': True,
73 'cg.version': '02',
73 'cg.version': '02',
74 'obsolescence': False,
74 'obsolescence': False,
75 'phases': False,
75 'phases': False,
76 'tagsfnodescache': True,
76 'tagsfnodescache': True,
77 'revbranchcache': True
77 'revbranchcache': True
78 },
78 },
79 'packed1' : {
79 'packed1' : {
80 'cg.version': 's1'
80 'cg.version': 's1'
81 }
81 }
82 }
82 }
83 _bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
83 _bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
84
84
85 _bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
85 _bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
86 "tagsfnodescache": False,
86 "tagsfnodescache": False,
87 "revbranchcache": False}}
87 "revbranchcache": False}}
88
88
89 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
89 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
90 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
90 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
91
91
@attr.s
class bundlespec(object):
    """Parsed representation of a bundle specification string.

    Produced by parsebundlespec(). Attribute order matters: it defines
    the positional-argument order of the attrs-generated __init__.
    """
    compression = attr.ib()
    wirecompression = attr.ib()
    version = attr.ib()
    wireversion = attr.ib()
    params = attr.ib()
    contentopts = attr.ib()
100
100
def parsebundlespec(repo, spec, strict=True):
    """Parse a bundle string specification into parts.

    Bundle specifications denote a well-defined bundle/exchange format.
    The content of a given specification should not change over time in
    order to ensure that bundles produced by a newer version of Mercurial
    are readable from an older version.

    The string currently has the form:

       <compression>-<type>[;<parameter0>[;<parameter1>]]

    Where <compression> is one of the supported compression formats and
    <type> is (currently) a version string. A ";" can follow the type and
    all text afterwards is interpreted as URI encoded, ";" delimited
    key=value pairs.

    If ``strict`` is True (the default) <compression> is required;
    otherwise it is optional.

    Returns a bundlespec object of (compression, version, parameters).
    Compression will be ``None`` if not in strict mode and a compression
    isn't defined.

    Raises ``InvalidBundleSpecification`` when the specification is not
    syntactically well formed, and ``UnsupportedBundleSpecification`` when
    the compression or bundle type/version is not recognized.

    Note: this function will likely eventually return a more complex data
    structure, including bundle2 part information.
    """
    def parseparams(value):
        # Split "<version>;k0=v0;k1=v1" into the version and a params dict.
        if ';' not in value:
            return value, {}

        version, paramstr = value.split(';', 1)
        params = {}
        for pair in paramstr.split(';'):
            if '=' not in pair:
                raise error.InvalidBundleSpecification(
                    _('invalid bundle specification: '
                      'missing "=" in parameter: %s') % pair)
            key, val = pair.split('=', 1)
            # keys and values are URI encoded
            params[urlreq.unquote(key)] = urlreq.unquote(val)

        return version, params

    if strict and '-' not in spec:
        raise error.InvalidBundleSpecification(
            _('invalid bundle specification; '
              'must be prefixed with compression: %s') % spec)

    if '-' in spec:
        # Full "<compression>-<version>" form.
        compression, version = spec.split('-', 1)

        if compression not in util.compengines.supportedbundlenames:
            raise error.UnsupportedBundleSpecification(
                _('%s compression is not supported') % compression)

        version, params = parseparams(version)

        if version not in _bundlespeccgversions:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle version') % version)
    else:
        # Value could be just the compression or just the version, in which
        # case some defaults are assumed (but only when not in strict mode).
        assert not strict

        spec, params = parseparams(spec)

        if spec in util.compengines.supportedbundlenames:
            compression = spec
            version = 'v1'
            # Generaldelta repos require v2.
            if 'generaldelta' in repo.requirements:
                version = 'v2'
            # Modern compression engines require v2.
            if compression not in _bundlespecv1compengines:
                version = 'v2'
        elif spec in _bundlespeccgversions:
            compression = 'none' if spec == 'packed1' else 'bzip2'
            version = spec
        else:
            raise error.UnsupportedBundleSpecification(
                _('%s is not a recognized bundle specification') % spec)

    # Bundle version 1 only supports a known set of compression engines.
    if version == 'v1' and compression not in _bundlespecv1compengines:
        raise error.UnsupportedBundleSpecification(
            _('compression engine %s is not supported on v1 bundles') %
            compression)

    # The specification for packed1 can optionally declare the data formats
    # required to apply it. If we see this metadata, compare against what the
    # repo supports and error if the bundle isn't compatible.
    if version == 'packed1' and 'requirements' in params:
        requirements = set(params['requirements'].split(','))
        missingreqs = requirements - repo.supportedformats
        if missingreqs:
            raise error.UnsupportedBundleSpecification(
                _('missing support for repository features: %s') %
                ', '.join(sorted(missingreqs)))

    # Compute contentopts based on the version.
    contentopts = _bundlespeccontentopts.get(version, {}).copy()

    # Process the variants.
    if params.get("stream") == "v2":
        contentopts.update(_bundlespecvariants["streamv2"])

    engine = util.compengines.forbundlename(compression)
    compression, wirecompression = engine.bundletype()
    wireversion = _bundlespeccgversions[version]

    return bundlespec(compression, wirecompression, version, wireversion,
                      params, contentopts)
229
229
def readbundle(ui, fh, fname, vfs=None):
    """Return an unbundler object for the bundle readable on *fh*.

    The first four bytes of *fh* are consumed to sniff the bundle format.
    *fname* is only used for error messages (``"stream"`` when empty);
    when *vfs* is given, *fname* is joined onto it for reporting.

    Raises ``error.Abort`` when the data is not a recognizable bundle.
    """
    header = changegroup.readexactly(fh, 4)

    compression = None
    if not fname:
        fname = "stream"
    if not header.startswith('HG') and header.startswith('\0'):
        # Headerless changegroup stream: re-attach the bytes we consumed
        # and treat it as an uncompressed HG10 bundle.
        fh = changegroup.headerlessfixup(fh, header)
        header = "HG10"
        compression = 'UN'
    elif vfs:
        fname = vfs.join(fname)

    magic, version = header[:2], header[2:4]

    if magic != 'HG':
        raise error.Abort(_('%s: not a Mercurial bundle') % fname)
    if version == '10':
        if compression is None:
            # HG10 carries a two-byte compression code after the magic.
            compression = changegroup.readexactly(fh, 2)
        return changegroup.cg1unpacker(fh, compression)
    elif version.startswith('2'):
        return bundle2.getunbundler(ui, fh, magicstring=magic + version)
    elif version == 'S1':
        return streamclone.streamcloneapplier(fh)
    else:
        raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
257
257
def getbundlespec(ui, fh):
    """Infer the bundlespec from a bundle file handle.

    The input file handle is seeked and the original seek position is not
    restored.

    Returns a bundlespec string such as ``'gzip-v1'`` or
    ``'none-v2;stream=v2;...'``. Raises ``error.Abort`` when the bundle
    type, compression, or changegroup version cannot be mapped to a spec.
    """
    def speccompression(alg):
        # Map an internal bundle compression type to its spec name,
        # or None when unknown.
        try:
            return util.compengines.forbundletype(alg).bundletype()[0]
        except KeyError:
            return None

    b = readbundle(ui, fh, None)
    if isinstance(b, changegroup.cg1unpacker):
        alg = b._type
        if alg == '_truncatedBZ':
            alg = 'BZ'
        comp = speccompression(alg)
        if not comp:
            raise error.Abort(_('unknown compression algorithm: %s') % alg)
        return '%s-v1' % comp
    elif isinstance(b, bundle2.unbundle20):
        if 'Compression' in b.params:
            alg = b.params['Compression']
            comp = speccompression(alg)
            if not comp:
                # BUGFIX: report the unrecognized algorithm name; the old
                # code interpolated ``comp``, which is None here.
                raise error.Abort(_('unknown compression algorithm: %s') % alg)
        else:
            comp = 'none'

        version = None
        for part in b.iterparts():
            if part.type == 'changegroup':
                version = part.params['version']
                if version in ('01', '02'):
                    version = 'v2'
                else:
                    raise error.Abort(_('changegroup version %s does not have '
                                        'a known bundlespec') % version,
                                      hint=_('try upgrading your Mercurial '
                                             'client'))
            elif part.type == 'stream2' and version is None:
                # A stream2 part requires to be part of a v2 bundle
                requirements = urlreq.unquote(part.params['requirements'])
                splitted = requirements.split()
                params = bundle2._formatrequirementsparams(splitted)
                return 'none-v2;stream=v2;%s' % params

        if not version:
            raise error.Abort(_('could not identify changegroup version in '
                                'bundle'))

        return '%s-%s' % (comp, version)
    elif isinstance(b, streamclone.streamcloneapplier):
        requirements = streamclone.readbundle1header(fh)[2]
        formatted = bundle2._formatrequirementsparams(requirements)
        return 'none-packed1;%s' % formatted
    else:
        raise error.Abort(_('unknown bundle type: %s') % b)
316
316
def _computeoutgoing(repo, heads, common):
    """Computes which revs are outgoing given a set of common
    and a set of heads.

    This is a separate function so extensions can have access to
    the logic.

    Returns a discovery.outgoing object.
    """
    changelog = repo.changelog
    if common:
        # Drop nodes the local changelog does not know about.
        hasnode = changelog.hasnode
        common = [node for node in common if hasnode(node)]
    else:
        # Nothing in common: everything from the null revision is outgoing.
        common = [nullid]
    if not heads:
        heads = changelog.heads()
    return discovery.outgoing(repo, common, heads)
335
335
def _checkpublish(pushop):
    """Enforce the 'experimental.auto-publish' policy for this push.

    When pushing draft changesets to a publishing remote, warn, prompt,
    or abort according to the configured behavior. A no-op when the push
    explicitly publishes or the remote is non-publishing.
    """
    repo = pushop.repo
    ui = repo.ui
    behavior = ui.config('experimental', 'auto-publish')
    if pushop.publish or behavior not in ('warn', 'confirm', 'abort'):
        return
    remotephases = listkeys(pushop.remote, 'phases')
    if not remotephases.get('publishing', False):
        # Remote does not publish on push; nothing to guard against.
        return

    # Figure out which non-public changesets this push would publish.
    if pushop.revs is None:
        topublish = repo.filtered('served').revs('not public()')
    else:
        topublish = repo.revs('::%ln - public()', pushop.revs)
    if not topublish:
        return

    if behavior == 'warn':
        ui.warn(_('%i changesets about to be published\n')
                % len(topublish))
    elif behavior == 'confirm':
        if ui.promptchoice(_('push and publish %i changesets (yn)?'
                             '$$ &Yes $$ &No') % len(topublish)):
            raise error.Abort(_('user quit'))
    elif behavior == 'abort':
        msg = _('push would publish %i changesets') % len(topublish)
        hint = _("use --publish or adjust 'experimental.auto-publish'"
                 " config")
        raise error.Abort(msg, hint=hint)
363
363
364 def _forcebundle1(op):
364 def _forcebundle1(op):
365 """return true if a pull/push must use bundle1
365 """return true if a pull/push must use bundle1
366
366
367 This function is used to allow testing of the older bundle version"""
367 This function is used to allow testing of the older bundle version"""
368 ui = op.repo.ui
368 ui = op.repo.ui
369 # The goal is this config is to allow developer to choose the bundle
369 # The goal is this config is to allow developer to choose the bundle
370 # version used during exchanged. This is especially handy during test.
370 # version used during exchanged. This is especially handy during test.
371 # Value is a list of bundle version to be picked from, highest version
371 # Value is a list of bundle version to be picked from, highest version
372 # should be used.
372 # should be used.
373 #
373 #
374 # developer config: devel.legacy.exchange
374 # developer config: devel.legacy.exchange
375 exchange = ui.configlist('devel', 'legacy.exchange')
375 exchange = ui.configlist('devel', 'legacy.exchange')
376 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
376 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
377 return forcebundle1 or not op.remote.capable('bundle2')
377 return forcebundle1 or not op.remote.capable('bundle2')
378
378
379 class pushoperation(object):
379 class pushoperation(object):
380 """A object that represent a single push operation
380 """A object that represent a single push operation
381
381
382 Its purpose is to carry push related state and very common operations.
382 Its purpose is to carry push related state and very common operations.
383
383
384 A new pushoperation should be created at the beginning of each push and
384 A new pushoperation should be created at the beginning of each push and
385 discarded afterward.
385 discarded afterward.
386 """
386 """
387
387
388 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
388 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
389 bookmarks=(), publish=False, pushvars=None):
389 bookmarks=(), publish=False, pushvars=None):
390 # repo we push from
390 # repo we push from
391 self.repo = repo
391 self.repo = repo
392 self.ui = repo.ui
392 self.ui = repo.ui
393 # repo we push to
393 # repo we push to
394 self.remote = remote
394 self.remote = remote
395 # force option provided
395 # force option provided
396 self.force = force
396 self.force = force
397 # revs to be pushed (None is "all")
397 # revs to be pushed (None is "all")
398 self.revs = revs
398 self.revs = revs
399 # bookmark explicitly pushed
399 # bookmark explicitly pushed
400 self.bookmarks = bookmarks
400 self.bookmarks = bookmarks
401 # allow push of new branch
401 # allow push of new branch
402 self.newbranch = newbranch
402 self.newbranch = newbranch
403 # step already performed
403 # step already performed
404 # (used to check what steps have been already performed through bundle2)
404 # (used to check what steps have been already performed through bundle2)
405 self.stepsdone = set()
405 self.stepsdone = set()
406 # Integer version of the changegroup push result
406 # Integer version of the changegroup push result
407 # - None means nothing to push
407 # - None means nothing to push
408 # - 0 means HTTP error
408 # - 0 means HTTP error
409 # - 1 means we pushed and remote head count is unchanged *or*
409 # - 1 means we pushed and remote head count is unchanged *or*
410 # we have outgoing changesets but refused to push
410 # we have outgoing changesets but refused to push
411 # - other values as described by addchangegroup()
411 # - other values as described by addchangegroup()
412 self.cgresult = None
412 self.cgresult = None
413 # Boolean value for the bookmark push
413 # Boolean value for the bookmark push
414 self.bkresult = None
414 self.bkresult = None
415 # discover.outgoing object (contains common and outgoing data)
415 # discover.outgoing object (contains common and outgoing data)
416 self.outgoing = None
416 self.outgoing = None
417 # all remote topological heads before the push
417 # all remote topological heads before the push
418 self.remoteheads = None
418 self.remoteheads = None
419 # Details of the remote branch pre and post push
419 # Details of the remote branch pre and post push
420 #
420 #
421 # mapping: {'branch': ([remoteheads],
421 # mapping: {'branch': ([remoteheads],
422 # [newheads],
422 # [newheads],
423 # [unsyncedheads],
423 # [unsyncedheads],
424 # [discardedheads])}
424 # [discardedheads])}
425 # - branch: the branch name
425 # - branch: the branch name
426 # - remoteheads: the list of remote heads known locally
426 # - remoteheads: the list of remote heads known locally
427 # None if the branch is new
427 # None if the branch is new
428 # - newheads: the new remote heads (known locally) with outgoing pushed
428 # - newheads: the new remote heads (known locally) with outgoing pushed
429 # - unsyncedheads: the list of remote heads unknown locally.
429 # - unsyncedheads: the list of remote heads unknown locally.
430 # - discardedheads: the list of remote heads made obsolete by the push
430 # - discardedheads: the list of remote heads made obsolete by the push
431 self.pushbranchmap = None
431 self.pushbranchmap = None
432 # testable as a boolean indicating if any nodes are missing locally.
432 # testable as a boolean indicating if any nodes are missing locally.
433 self.incoming = None
433 self.incoming = None
434 # summary of the remote phase situation
434 # summary of the remote phase situation
435 self.remotephases = None
435 self.remotephases = None
436 # phases changes that must be pushed along side the changesets
436 # phases changes that must be pushed along side the changesets
437 self.outdatedphases = None
437 self.outdatedphases = None
438 # phases changes that must be pushed if changeset push fails
438 # phases changes that must be pushed if changeset push fails
439 self.fallbackoutdatedphases = None
439 self.fallbackoutdatedphases = None
440 # outgoing obsmarkers
440 # outgoing obsmarkers
441 self.outobsmarkers = set()
441 self.outobsmarkers = set()
442 # outgoing bookmarks
442 # outgoing bookmarks
443 self.outbookmarks = []
443 self.outbookmarks = []
444 # transaction manager
444 # transaction manager
445 self.trmanager = None
445 self.trmanager = None
446 # map { pushkey partid -> callback handling failure}
446 # map { pushkey partid -> callback handling failure}
447 # used to handle exception from mandatory pushkey part failure
447 # used to handle exception from mandatory pushkey part failure
448 self.pkfailcb = {}
448 self.pkfailcb = {}
449 # an iterable of pushvars or None
449 # an iterable of pushvars or None
450 self.pushvars = pushvars
450 self.pushvars = pushvars
451 # publish pushed changesets
451 # publish pushed changesets
452 self.publish = publish
452 self.publish = publish
453
453
454 @util.propertycache
454 @util.propertycache
455 def futureheads(self):
455 def futureheads(self):
456 """future remote heads if the changeset push succeeds"""
456 """future remote heads if the changeset push succeeds"""
457 return self.outgoing.missingheads
457 return self.outgoing.missingheads
458
458
459 @util.propertycache
459 @util.propertycache
460 def fallbackheads(self):
460 def fallbackheads(self):
461 """future remote heads if the changeset push fails"""
461 """future remote heads if the changeset push fails"""
462 if self.revs is None:
462 if self.revs is None:
463 # not target to push, all common are relevant
463 # not target to push, all common are relevant
464 return self.outgoing.commonheads
464 return self.outgoing.commonheads
465 unfi = self.repo.unfiltered()
465 unfi = self.repo.unfiltered()
466 # I want cheads = heads(::missingheads and ::commonheads)
466 # I want cheads = heads(::missingheads and ::commonheads)
467 # (missingheads is revs with secret changeset filtered out)
467 # (missingheads is revs with secret changeset filtered out)
468 #
468 #
469 # This can be expressed as:
469 # This can be expressed as:
470 # cheads = ( (missingheads and ::commonheads)
470 # cheads = ( (missingheads and ::commonheads)
471 # + (commonheads and ::missingheads))"
471 # + (commonheads and ::missingheads))"
472 # )
472 # )
473 #
473 #
474 # while trying to push we already computed the following:
474 # while trying to push we already computed the following:
475 # common = (::commonheads)
475 # common = (::commonheads)
476 # missing = ((commonheads::missingheads) - commonheads)
476 # missing = ((commonheads::missingheads) - commonheads)
477 #
477 #
478 # We can pick:
478 # We can pick:
479 # * missingheads part of common (::commonheads)
479 # * missingheads part of common (::commonheads)
480 common = self.outgoing.common
480 common = self.outgoing.common
481 nm = self.repo.changelog.nodemap
481 nm = self.repo.changelog.nodemap
482 cheads = [node for node in self.revs if nm[node] in common]
482 cheads = [node for node in self.revs if nm[node] in common]
483 # and
483 # and
484 # * commonheads parents on missing
484 # * commonheads parents on missing
485 revset = unfi.set('%ln and parents(roots(%ln))',
485 revset = unfi.set('%ln and parents(roots(%ln))',
486 self.outgoing.commonheads,
486 self.outgoing.commonheads,
487 self.outgoing.missing)
487 self.outgoing.missing)
488 cheads.extend(c.node() for c in revset)
488 cheads.extend(c.node() for c in revset)
489 return cheads
489 return cheads
490
490
@property
def commonheads(self):
    """set of all common heads after changeset bundle push"""
    # After a successful changegroup push every head we intended to push
    # has become common; on failure, fall back to the pre-push estimate.
    if not self.cgresult:
        return self.fallbackheads
    return self.futureheads
498
498
# mapping of message used when pushing bookmark
# Each action name maps to a (success, failure) pair of i18n message
# templates taking the bookmark name as the single "%s" argument.
bookmsgmap = {'update': (_("updating bookmark %s\n"),
                         _('updating bookmark %s failed!\n')),
              'export': (_("exporting bookmark %s\n"),
                         _('exporting bookmark %s failed!\n')),
              'delete': (_("deleting remote bookmark %s\n"),
                         _('deleting remote bookmark %s failed!\n')),
              }
506 }
507
507
508
508
def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
         publish=False, opargs=None):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()

    (Note: the object actually returned is the pushoperation; the integer
    described above is available as its ``cgresult`` attribute.)
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
                           publish, **pycompat.strkwargs(opargs))
    if pushop.remote.local():
        # local-to-local push: the destination repository must support every
        # requirement of the source repository
        missing = (set(pushop.repo.requirements)
                   - pushop.remote.local().supported)
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_("destination does not support push"))

    if not pushop.remote.capable('unbundle'):
        raise error.Abort(_('cannot push: destination does not support the '
                            'unbundle wire protocol command'))

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks
        # requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
        if ((not _forcebundle1(pushop)) and
            maypushback and
            not bookmod.bookmarksinstore(repo)):
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(pushop.repo,
                                              'push-response',
                                              pushop.remote.url())
    except error.LockUnavailable as err:
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = ('cannot lock source repository: %s\n'
               % stringutil.forcebytestr(err))
        pushop.ui.debug(msg)

    # run every push step under whichever locks/transaction we managed to
    # take above (nullcontextmanager stands in for the ones we did not get)
    with wlock or util.nullcontextmanager():
        with lock or util.nullcontextmanager():
            with pushop.trmanager or util.nullcontextmanager():
                pushop.repo.checkpush(pushop)
                _checkpublish(pushop)
                _pushdiscovery(pushop)
                if not _forcebundle1(pushop):
                    _pushbundle2(pushop)
                _pushchangeset(pushop)
                _pushsyncphase(pushop)
                _pushobsolete(pushop)
                _pushbookmark(pushop)

    if repo.ui.configbool('experimental', 'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pushop
578
578
# list of steps to perform discovery before push
# (populated by the @pushdiscovery decorator below)
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
586
586
def pushdiscovery(stepname):
    """decorator for function performing discovery before push

    The decorated function is registered under ``stepname`` in the
    step -> function mapping and ``stepname`` is appended to the ordered
    list of steps, so the order of decoration may matter.

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pushdiscovery dictionary directly."""
    def register(discoveryfunc):
        # refuse to silently overwrite an already-registered step
        assert stepname not in pushdiscoverymapping
        pushdiscoveryorder.append(stepname)
        pushdiscoverymapping[stepname] = discoveryfunc
        return discoveryfunc
    return register
602
602
def _pushdiscovery(pushop):
    """Run all discovery steps"""
    # iterate in registration order; each step mutates ``pushop`` in place
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
608
608
@pushdiscovery('changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    repo, remote, force = pushop.repo, pushop.remote, pushop.force
    # restrict the incoming discovery to the pushed revs when given
    inckwargs = {'force': force}
    if pushop.revs:
        inckwargs['ancestorsof'] = pushop.revs
    commoninc = discovery.findcommonincoming(repo, remote, **inckwargs)
    common, inc, remoteheads = commoninc
    outgoing = discovery.findcommonoutgoing(repo, remote,
                                            onlyheads=pushop.revs,
                                            commoninc=commoninc,
                                            force=force)
    pushop.outgoing = outgoing
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
625
625
@pushdiscovery('phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    # remote view of phases, fetched through the "phases" listkeys namespace
    remotephases = listkeys(pushop.remote, 'phases')

    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and not pushop.outgoing.missing # no changesets to be pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(pushop.repo,
                                                     pushop.fallbackheads,
                                                     remotephases)
    droots = pushop.remotephases.draftroots

    extracond = ''
    if not pushop.remotephases.publishing:
        # only consider changesets that are public locally
        extracond = ' and public()'
    revset = 'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not pushop.remotephases.publishing and pushop.publish:
        # push with --publish on a non-publishing server
        future = list(unfi.set('%ln and (not public() or %ln::)',
                               pushop.futureheads, droots))
    elif not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(unfi.set('roots(%ln + %ln::)',
                                outgoing.missing, droots))
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
681
681
@pushdiscovery('obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """compute the set of obsolescence markers relevant to the push, if any"""
    repo = pushop.repo
    # marker exchange must be enabled locally
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return
    # nothing to send when the local obsstore is empty
    if not repo.obsstore:
        return
    # the remote must advertise the "obsolete" namespace
    if 'obsolete' not in listkeys(pushop.remote, 'namespaces'):
        return
    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    ancestornodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(ancestornodes)
698
698
@pushdiscovery('bookmarks')
def _pushdiscoverybookmarks(pushop):
    """discover the bookmark updates that need to be pushed"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug("checking for updated bookmarks\n")
    ancestors = ()
    if pushop.revs:
        # limit bookmark discovery to the ancestors of the pushed revs
        revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)

    remotebookmark = listkeys(remote, 'bookmarks')

    # bookmarks explicitly requested for this push, with names expanded
    explicit = {repo._bookmarks.expandname(bookmark)
                for bookmark in pushop.bookmarks}

    remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)

    def safehex(x):
        # a side may have no node for the bookmark; keep None as-is
        if x is None:
            return x
        return hex(x)

    def hexifycompbookmarks(bookmarks):
        return [(b, safehex(scid), safehex(dcid))
                for (b, scid, dcid) in bookmarks]

    comp = [hexifycompbookmarks(marks) for marks in comp]
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
729
729
def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """take decision on bookmark to pull from the remote bookmark

    Exist to help extensions who want to alter this behavior.
    """
    # ``comp`` is the 8-tuple classification produced by
    # bookmod.comparebookmarks (hexified by the caller)
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo

    # bookmarks that advanced locally: push them only when their target is
    # part of the push (``pushed`` is empty when no revs were restricted)
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not pushed or repo[scid].rev() in pushed:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, '', scid))
    # search for overwritten bookmark
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
            # treat as "deleted locally"
            pushop.outbookmarks.append((b, dcid, ''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    if explicit:
        # any name left in ``explicit`` matched none of the categories above
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(_('bookmark %s does not exist on the local '
                         'or remote repository!\n') % explicit[0])
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
773
773
def _pushcheckoutgoing(pushop):
    """validate the outgoing changesets before push

    Returns False when there is nothing to push; aborts when an outgoing
    head is obsolete or unstable (unless the push is forced)."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _("push includes obsolete changeset: %s!")
            mspd = _("push includes phase-divergent changeset: %s!")
            mscd = _("push includes content-divergent changeset: %s!")
            mst = {"orphan": _("push includes orphan changeset: %s!"),
                   "phase-divergent": mspd,
                   "content-divergent": mscd}
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.missingheads:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    discovery.checkheads(pushop)
    return True
808
808
# List of names of steps to perform for an outgoing bundle2, order matters.
# (populated by the @b2partsgenerator decorator below)
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}
816
816
def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The decorated function is registered in the step -> function mapping and
    its name is inserted in the ordered list of steps: appended at the end
    when ``idx`` is None, at position ``idx`` otherwise. Decoration order may
    therefore matter.

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""
    def register(partgenfunc):
        # each step may only be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = partgenfunc
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return partgenfunc
    return register
835
835
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if not pushop.force and pushop.outgoing.missingheads:
        allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
        emptyremote = pushop.pushbranchmap is None
        if not allowunrelated or emptyremote:
            # strict check: remote heads must be exactly the ones observed
            # during discovery
            bundler.newpart('check:heads', data=iter(pushop.remoteheads))
        else:
            # only check the remote heads this push will actually affect
            affected = set()
            for branch, heads in pushop.pushbranchmap.iteritems():
                remoteheads, newheads, unsyncedheads, discardedheads = heads
                if remoteheads is not None:
                    remote = set(remoteheads)
                    # heads we discard plus heads that will no longer be heads
                    affected |= set(discardedheads) & remote
                    affected |= remote - set(newheads)
            if affected:
                data = iter(sorted(affected))
                bundler.newpart('check:updated-heads', data=data)
859
859
860 def _pushing(pushop):
860 def _pushing(pushop):
861 """return True if we are pushing anything"""
861 """return True if we are pushing anything"""
862 return bool(pushop.outgoing.missing
862 return bool(pushop.outgoing.missing
863 or pushop.outdatedphases
863 or pushop.outdatedphases
864 or pushop.outobsmarkers
864 or pushop.outobsmarkers
865 or pushop.outbookmarks)
865 or pushop.outbookmarks)
866
866
@b2partsgenerator('check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    # nothing to check when nothing is pushed or when the push is forced
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if not (pushop.outbookmarks and 'bookmarks' in b2caps):
        return
    # record the expected current value of each moved bookmark so the
    # remote can detect a concurrent change
    bookstate = [(book, bin(old))
                 for book, old, new in pushop.outbookmarks]
    checkdata = bookmod.binaryencode(bookstate)
    bundler.newpart('check:bookmarks', data=checkdata)
882
882
@b2partsgenerator('check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if 'heads' not in b2caps.get('phases', ()):
        return
    if pushop.remotephases is None:
        return
    # check that the remote phase has not changed
    checks = [[] for p in phases.allphases]
    checks[phases.public].extend(pushop.remotephases.publicheads)
    checks[phases.draft].extend(pushop.remotephases.draftroots)
    if any(checks):
        for nodes in checks:
            nodes.sort()
        checkdata = phases.binaryencode(checks)
        bundler.newpart('check:phases', data=checkdata)
900
900
@b2partsgenerator('changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        # nothing to push: skip the changegroup part entirely
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    # negotiate the changegroup version with the remote
    b2caps = bundle2.bundle2caps(pushop.remote)
    version = '01'
    cgversions = b2caps.get('changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [v for v in cgversions
                      if v in changegroup.supportedoutgoingversions(
                          pushop.repo)]
        if not cgversions:
            raise error.Abort(_('no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
                                      'push')
    cgpart = bundler.newpart('changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam('version', version)
    if 'treemanifest' in pushop.repo.requirements:
        cgpart.addparam('treemanifest', '1')
    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        # closes over ``cgpart`` to find the matching reply part
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies['changegroup']) == 1
        pushop.cgresult = cgreplies['changegroup'][0]['return']
    return handlereply
940
940
@b2partsgenerator('phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if 'phases' in pushop.stepsdone:
        # another part generator already took care of phases
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    # prefer the binary phase-heads part unless the user forces the legacy
    # pushkey based exchange
    uselegacy = 'phases' in ui.configlist('devel', 'legacy.exchange')
    if not uselegacy and 'heads' in b2caps.get('phases', ()):
        return _pushb2phaseheads(pushop, bundler)
    if 'pushkey' in b2caps:
        return _pushb2phasespushkey(pushop, bundler)
957
957
def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part

    Adds a 'phase-heads' part advertising the heads that should become
    public on the remote. Nothing is added when no phase is outdated.
    """
    pushop.stepsdone.add('phases')
    if pushop.outdatedphases:
        # one bucket per phase, indexed by phase number; only the public
        # bucket (index 0) is ever filled from the push path
        updates = [[] for p in phases.allphases]
        updates[0].extend(h.node() for h in pushop.outdatedphases)
        phasedata = phases.binaryencode(updates)
        bundler.newpart('phase-heads', data=phasedata)
966
966
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part

    Emits one 'pushkey' part per outdated head, registers a failure
    callback for each part, and returns a reply handler that reports
    per-part success or failure to the user.
    """
    pushop.stepsdone.add('phases')
    # (part id, head) pairs, used by both callbacks below
    part2node = []

    def handlefailure(pushop, exc):
        # called by _pushbundle2 when the server reports a pushkey failure
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_('updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('phases'))
        part.addparam('key', enc(newremotehead.hex()))
        part.addparam('old', enc('%d' % phases.draft))
        part.addparam('new', enc('%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # inspect the server reply for each pushkey part we sent
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _('server ignored update of %s to public!\n') % node
            elif not int(results[0]['return']):
                msg = _('updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)
    return handlereply
1001
1001
@b2partsgenerator('obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add an obsolescence-markers part to the bundle when possible

    Skipped when the step already ran or when no marker format is
    common to both sides.
    """
    if 'obsmarkers' in pushop.stepsdone:
        return
    supported = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(supported) is None:
        # no marker version understood by both ends; leave the step undone
        return
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    bundle2.buildobsmarkerspart(bundler, sorted(pushop.outobsmarkers))
1013
1013
@b2partsgenerator('bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Prefers the binary 'bookmarks' part unless tests force the legacy
    protocol; otherwise falls back to pushkey when advertised.
    """
    if 'bookmarks' in pushop.stepsdone:
        return
    caps = bundle2.bundle2caps(pushop.remote)

    # 'devel.legacy.exchange' lets tests force the pushkey-based flow
    forcelegacy = 'bookmarks' in pushop.repo.ui.configlist(
        'devel', 'legacy.exchange')

    if 'bookmarks' in caps and not forcelegacy:
        return _pushb2bookmarkspart(pushop, bundler)
    if 'pushkey' in caps:
        return _pushb2bookmarkspushkey(pushop, bundler)
1028
1028
1029 def _bmaction(old, new):
1029 def _bmaction(old, new):
1030 """small utility for bookmark pushing"""
1030 """small utility for bookmark pushing"""
1031 if not old:
1031 if not old:
1032 return 'export'
1032 return 'export'
1033 elif not new:
1033 elif not new:
1034 return 'delete'
1034 return 'delete'
1035 return 'update'
1035 return 'update'
1036
1036
def _abortonsecretctx(pushop, node, b):
    """abort if a given bookmark points to a secret changeset

    ``node`` is the (hex) target of bookmark ``b``; it is falsy when the
    bookmark is being deleted, in which case there is nothing to check.
    Raises error.Abort when the target changeset is secret (issue6159).
    """
    if node and pushop.repo[node].phase() == phases.secret:
        raise error.Abort(_('cannot push bookmark %s as it points to a secret'
                            ' changeset') % b)
1042
def _pushb2bookmarkspart(pushop, bundler):
    """push bookmark updates through a bundle2 - binary 'bookmarks' part

    Aborts before building the part if any pushed bookmark points to a
    secret changeset. Returns a reply handler that reports success for
    every bookmark action.
    """
    pushop.stepsdone.add('bookmarks')
    if not pushop.outbookmarks:
        return

    allactions = []
    data = []
    for book, old, new in pushop.outbookmarks:
        # refuse to expose secret changesets via bookmarks (issue6159)
        _abortonsecretctx(pushop, new, book)
        new = bin(new)
        data.append((book, new))
        allactions.append((book, _bmaction(old, new)))
    checkdata = bookmod.binaryencode(data)
    bundler.newpart('bookmarks', data=checkdata)

    def handlereply(op):
        ui = pushop.ui
        # if success
        for book, action in allactions:
            ui.status(bookmsgmap[action][0] % book)

    return handlereply
1058
1065
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmark updates through bundle2 pushkey parts (legacy path)

    Emits one 'pushkey' part per outgoing bookmark, registers a failure
    callback for each, and returns a reply handler reporting per-bookmark
    results. Aborts if a pushed bookmark targets a secret changeset.
    """
    pushop.stepsdone.add('bookmarks')
    # (part id, bookmark name, action) triples shared by both callbacks
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        # refuse to expose secret changesets via bookmarks (issue6159)
        _abortonsecretctx(pushop, new, book)
        part = bundler.newpart('pushkey')
        part.addparam('namespace', enc('bookmarks'))
        part.addparam('key', enc(book))
        part.addparam('old', enc(old))
        part.addparam('new', enc(new))
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep['pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0]['return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1
    return handlereply
1103
1111
@b2partsgenerator('pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    pushvars = pushop.pushvars
    if not pushvars:
        return

    # parse each "KEY=VALUE" (or "KEY=") argument into a mapping
    shellvars = {}
    for raw in pushvars:
        if '=' not in raw:
            msg = ("unable to parse variable '%s', should follow "
                   "'KEY=VALUE' or 'KEY=' format")
            raise error.Abort(msg % raw)
        name, value = raw.split('=', 1)
        shellvars[name] = value

    part = bundler.newpart('pushvars')
    for key, value in shellvars.iteritems():
        part.addparam(key, value, mandatory=False)
1122
1130
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # whether the server may push data back to us in its reply bundle
    pushback = (pushop.trmanager
                and pushop.ui.configbool('experimental', 'bundle2.pushback'))

    # create reply capability
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
                                                      allowpushback=pushback,
                                                      role='client'))
    bundler.newpart('replycaps', data=capsblob)
    replyhandlers = []
    # run every registered part generator; each may contribute parts and
    # return a callable to process the server's reply
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            with pushop.remote.commandexecutor() as e:
                reply = e.callcommand('unbundle', {
                    'bundle': stream,
                    'heads': ['force'],
                    'url': pushop.remote.url(),
                }).result()
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            # process the reply bundle (may open a local transaction)
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_('missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # the server aborted while processing one of our parts
            pushop.ui.status(_('remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
            raise error.Abort(_('push failed on remote'))
    except error.PushkeyFailed as exc:
        # delegate to the failure callback registered for that part, if any
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
1176
1184
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) path: builds a version-01 changegroup and sends
    it through the remote's ``unbundle`` command, storing the result in
    ``pushop.cgresult``.
    """
    if 'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add('changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable('unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (outgoing.excluded
                                    or pushop.repo.changelog.filteredrevs):
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
                                         fastpath=True, bundlecaps=bundlecaps)
    else:
        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
                                         'push', bundlecaps=bundlecaps)

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = ['force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
                                             pushop.repo.url())
1216
1224
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    First applies the remote's phase view to the local repo, then (when
    the bundle2 path did not already do it) advances outdated remote
    heads to public via individual pushkey calls.
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = listkeys(pushop.remote, 'phases')
    if (pushop.ui.configbool('ui', '_usedassubrepo')
        and remotephases    # server supports phases
        and pushop.cgresult is None # nothing was pushed
        and remotephases.get('publishing', False)):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {'publishing': 'True'}
    if not remotephases: # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads,
                                         remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get('publishing', False):
            _localphasemove(pushop, cheads)
        else: # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if 'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add('phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            with pushop.remote.commandexecutor() as e:
                r = e.callcommand('pushkey', {
                    'namespace': 'phases',
                    'key': newremotehead.hex(),
                    'old': '%d' % phases.draft,
                    'new': '%d' % phases.public
                }).result()

            if not r:
                pushop.ui.warn(_('updating %s to public failed!\n')
                               % newremotehead)
1276
1284
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        phases.advanceboundary(pushop.repo,
                               pushop.trmanager.transaction(),
                               phase,
                               nodes)
        return
    # repo is not locked, do not change any phases!
    # Informs the user that phases should have been moved when
    # applicable.
    repo = pushop.repo
    wouldmove = [n for n in nodes if phase < repo[n].phase()]
    if wouldmove:
        phasestr = phases.phasenames[phase]
        pushop.ui.status(_('cannot lock source repo, skipping '
                           'local %s phase update\n') % phasestr)
1293
1301
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug('try to push obsolete markers to remote\n')
    remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
    results = []
    # reverse sort to ensure we end with dump0
    for key in sorted(remotedata, reverse=True):
        results.append(remote.pushkey('obsolete', key, '', remotedata[key]))
    if not all(results):
        repo.ui.warn(_('failed to push some obsolete markers!\n'))
1312
1320
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Legacy (non-bundle2) path: issues one ``pushkey`` command per
    outgoing bookmark and reports success/failure per bookmark.
    """
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'

        with remote.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': 'bookmarks',
                'key': b,
                'old': old,
                'new': new,
            }).result()

        if r:
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value form invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1
1343
1351
1344 class pulloperation(object):
1352 class pulloperation(object):
1345 """A object that represent a single pull operation
1353 """A object that represent a single pull operation
1346
1354
1347 It purpose is to carry pull related state and very common operation.
1355 It purpose is to carry pull related state and very common operation.
1348
1356
1349 A new should be created at the beginning of each pull and discarded
1357 A new should be created at the beginning of each pull and discarded
1350 afterward.
1358 afterward.
1351 """
1359 """
1352
1360
1353 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1361 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1354 remotebookmarks=None, streamclonerequested=None,
1362 remotebookmarks=None, streamclonerequested=None,
1355 includepats=None, excludepats=None, depth=None):
1363 includepats=None, excludepats=None, depth=None):
1356 # repo we pull into
1364 # repo we pull into
1357 self.repo = repo
1365 self.repo = repo
1358 # repo we pull from
1366 # repo we pull from
1359 self.remote = remote
1367 self.remote = remote
1360 # revision we try to pull (None is "all")
1368 # revision we try to pull (None is "all")
1361 self.heads = heads
1369 self.heads = heads
1362 # bookmark pulled explicitly
1370 # bookmark pulled explicitly
1363 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1371 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1364 for bookmark in bookmarks]
1372 for bookmark in bookmarks]
1365 # do we force pull?
1373 # do we force pull?
1366 self.force = force
1374 self.force = force
1367 # whether a streaming clone was requested
1375 # whether a streaming clone was requested
1368 self.streamclonerequested = streamclonerequested
1376 self.streamclonerequested = streamclonerequested
1369 # transaction manager
1377 # transaction manager
1370 self.trmanager = None
1378 self.trmanager = None
1371 # set of common changeset between local and remote before pull
1379 # set of common changeset between local and remote before pull
1372 self.common = None
1380 self.common = None
1373 # set of pulled head
1381 # set of pulled head
1374 self.rheads = None
1382 self.rheads = None
1375 # list of missing changeset to fetch remotely
1383 # list of missing changeset to fetch remotely
1376 self.fetch = None
1384 self.fetch = None
1377 # remote bookmarks data
1385 # remote bookmarks data
1378 self.remotebookmarks = remotebookmarks
1386 self.remotebookmarks = remotebookmarks
1379 # result of changegroup pulling (used as return code by pull)
1387 # result of changegroup pulling (used as return code by pull)
1380 self.cgresult = None
1388 self.cgresult = None
1381 # list of step already done
1389 # list of step already done
1382 self.stepsdone = set()
1390 self.stepsdone = set()
1383 # Whether we attempted a clone from pre-generated bundles.
1391 # Whether we attempted a clone from pre-generated bundles.
1384 self.clonebundleattempted = False
1392 self.clonebundleattempted = False
1385 # Set of file patterns to include.
1393 # Set of file patterns to include.
1386 self.includepats = includepats
1394 self.includepats = includepats
1387 # Set of file patterns to exclude.
1395 # Set of file patterns to exclude.
1388 self.excludepats = excludepats
1396 self.excludepats = excludepats
1389 # Number of ancestor changesets to pull from each pulled head.
1397 # Number of ancestor changesets to pull from each pulled head.
1390 self.depth = depth
1398 self.depth = depth
1391
1399
@util.propertycache
def pulledsubset(self):
    """heads of the set of changeset target by the pull"""
    if self.heads is not None:
        # a specific subset was requested; sync on exactly that subset
        return self.heads
    # everything possible was pulled; sync on everything common plus
    # any remote head not already in the common set
    known = set(self.common)
    subset = list(self.common)
    subset.extend(node for node in self.rheads if node not in known)
    return subset
1409
1417
@util.propertycache
def canusebundle2(self):
    """True unless something forces the legacy bundle1 exchange path."""
    forced = _forcebundle1(self)
    return not forced
1413
1421
@util.propertycache
def remotebundle2caps(self):
    """bundle2 capabilities advertised by the remote peer."""
    caps = bundle2.bundle2caps(self.remote)
    return caps
1417
1425
def gettransaction(self):
    # deprecated; callers should talk to trmanager directly
    manager = self.trmanager
    return manager.transaction()
1421
1429
class transactionmanager(util.transactional):
    """An object to manage the life cycle of a transaction

    It creates the transaction on demand and calls the appropriate hooks when
    closing the transaction."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if not self._tr:
            # transaction name carries the source and the (password-scrubbed)
            # remote url for hooks and debugging
            trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            tr.hookargs['source'] = self.source
            tr.hookargs['url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def release(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
1451
1459
def listkeys(remote, namespace):
    """Fetch the pushkey ``namespace`` mapping from ``remote``."""
    with remote.commandexecutor() as e:
        fut = e.callcommand('listkeys', {'namespace': namespace})
        return fut.result()
1455
1463
def _fullpullbundle2(repo, pullop):
    """Pull via bundle2, re-requesting until the remote reply is complete."""
    # The server may send a partial reply, i.e. when inlining
    # pre-computed bundles. In that case, update the common
    # set based on the results and pull another bundle.
    #
    # There are two indicators that the process is finished:
    # - no changeset has been added, or
    # - all remote heads are known locally.
    # The head check must use the unfiltered view as obsoletion
    # markers can hide heads.
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    def headsofdiff(h1, h2):
        """Returns heads(h1 % h2)"""
        res = unfi.set('heads(%ln %% %ln)', h1, h2)
        return set(ctx.node() for ctx in res)
    def headsofunion(h1, h2):
        """Returns heads((h1 + h2) - null)"""
        res = unfi.set('heads((%ln + %ln - null))', h1, h2)
        return set(ctx.node() for ctx in res)
    while True:
        old_heads = unficl.heads()
        clstart = len(unficl)
        _pullbundle2(pullop)
        if repository.NARROW_REQUIREMENT in repo.requirements:
            # XXX narrow clones filter the heads on the server side during
            # XXX getbundle and result in partial replies as well.
            # XXX Disable pull bundles in this case as band aid to avoid
            # XXX extra round trips.
            break
        if clstart == len(unficl):
            # no changeset was added by this round: the reply was complete
            break
        if all(unficl.hasnode(n) for n in pullop.rheads):
            # every remote head is now known locally: done
            break
        # partial reply: fold the newly received heads into the common set
        # and request another bundle for the remainder
        new_heads = headsofdiff(unficl.heads(), old_heads)
        pullop.common = headsofunion(new_heads, pullop.common)
        pullop.rheads = set(pullop.rheads) - pullop.common
1493
1501
def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None, includepats=None, excludepats=None,
         depth=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.
    ``includepats`` and ``excludepats`` define explicit file patterns to
    include and exclude in storage, respectively. If not defined, narrow
    patterns from the repo instance are used, if available.
    ``depth`` is an integer indicating the DAG depth of history we're
    interested in. If defined, for each revision specified in ``heads``, we
    will fetch up to this many of its ancestors and data associated with them.

    Returns the ``pulloperation`` created for this pull.

    Raises ``error.Abort`` if the destination lacks features the source
    requires.
    """
    if opargs is None:
        opargs = {}

    # We allow the narrow patterns to be passed in explicitly to provide more
    # flexibility for API consumers.
    if includepats or excludepats:
        includepats = includepats or set()
        excludepats = excludepats or set()
    else:
        includepats, excludepats = repo.narrowpats

    narrowspec.validatepatterns(includepats)
    narrowspec.validatepatterns(excludepats)

    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           includepats=includepats, excludepats=excludepats,
                           depth=depth,
                           **pycompat.strkwargs(opargs))

    peerlocal = pullop.remote.local()
    if peerlocal:
        # for a local peer we can check requirements up front and fail early
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
    # the wlock is only needed when bookmarks live outside the store
    wlock = util.nullcontextmanager()
    if not bookmod.bookmarksinstore(repo):
        wlock = repo.wlock()
    with wlock, repo.lock(), pullop.trmanager:
        # Use the modern wire protocol, if available.
        if remote.capable('command-changesetdata'):
            exchangev2.pull(pullop)
        else:
            # This should ideally be in _pullbundle2(). However, it needs to run
            # before discovery to avoid extra work.
            _maybeapplyclonebundle(pullop)
            streamclone.maybeperformlegacystreamclone(pullop)
            _pulldiscovery(pullop)
            if pullop.canusebundle2:
                _fullpullbundle2(repo, pullop)
            # the _pull* steps below are no-ops for anything already marked
            # done in pullop.stepsdone by the bundle2 path
            _pullchangeset(pullop)
            _pullphase(pullop)
            _pullbookmarks(pullop)
            _pullobsolete(pullop)

    # storing remotenames
    if repo.ui.configbool('experimental', 'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pullop
1578
1586
# list of steps to perform discovery before pull (execution order)
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}
1586
1594
def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""
    def register(func):
        # each step name may only be declared once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func
    return register
1602
1610
def _pulldiscovery(pullop):
    """Run all discovery steps"""
    for stepname in pulldiscoveryorder:
        # dispatch through the mapping so extensions may have wrapped a step
        pulldiscoverymapping[stepname](pullop)
1608
1616
@pulldiscovery('b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is None:
        usebundle2 = (pullop.canusebundle2
                      and 'listkeys' in pullop.remotebundle2caps)
        if not usebundle2:
            # all known bundle2 servers now support listkeys, so only the
            # legacy protocol needs the early fetch; lets be nice with new
            # implementation.
            raw = listkeys(pullop.remote, 'bookmarks')
            pullop.remotebookmarks = bookmod.unhexlifybookmarks(raw)
1623
1631
1624
1632
@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will change to handle all
    discovery at some point.

    Stores the results on ``pullop`` as ``common``, ``fetch`` and ``rheads``.
    """
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, put it back in common.
        #
        # This is a hackish solution to catch most of the "common but locally
        # hidden" situation. We do not perform discovery on an unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but does not include a remote head, we'll not be able to
        # detect it,
        scommon = set(common)
        for n in rheads:
            if n in nm:
                # known locally (even if filtered) -> treat as common
                if n not in scommon:
                    common.append(n)
        if set(rheads).issubset(set(common)):
            # every remote head is common: nothing to fetch
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1657
1665
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup.

    Builds the 'getbundle' argument dict from the negotiated capabilities,
    fetches and applies the bundle, then processes the returned phase and
    bookmark records. Completed steps are recorded in ``pullop.stepsdone``
    so the legacy per-step pulls become no-ops afterwards.
    """
    kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads

    # check server supports narrow and then adding includepats and excludepats
    servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
    if servernarrow and pullop.includepats:
        kwargs['includepats'] = pullop.includepats
    if servernarrow and pullop.excludepats:
        kwargs['excludepats'] = pullop.excludepats

    if streaming:
        # stream clone replaces both the changegroup and phase steps
        kwargs['cg'] = False
        kwargs['stream'] = True
        pullop.stepsdone.add('changegroup')
        pullop.stepsdone.add('phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add('changegroup')

        kwargs['cg'] = pullop.fetch

        legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
        hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
        if (not legacyphase and hasbinaryphase):
            # prefer the binary phase-heads part over pushkey listkeys
            kwargs['phases'] = True
            pullop.stepsdone.add('phases')

        if 'listkeys' in pullop.remotebundle2caps:
            if 'phases' not in pullop.stepsdone:
                kwargs['listkeys'] = ['phases']

    bookmarksrequested = False
    legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
    hasbinarybook = 'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        pullop.stepsdone.add('request-bookmarks')

    if ('request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark and hasbinarybook):
        kwargs['bookmarks'] = True
        bookmarksrequested = True

    if 'listkeys' in pullop.remotebundle2caps:
        if 'request-bookmarks' not in pullop.stepsdone:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add('request-bookmarks')
            kwargs.setdefault('listkeys', []).append('bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (pullop.remote.capable('clonebundles')
        and pullop.heads is None and list(pullop.common) == [nullid]):
        kwargs['cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_('streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_("requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            # both sides speak a common obsmarker format
            kwargs['obsmarkers'] = True
            pullop.stepsdone.add('obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)

    with pullop.remote.commandexecutor() as e:
        args = dict(kwargs)
        args['source'] = 'pull'
        bundle = e.callcommand('getbundle', args).result()

    try:
        op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction,
                                     source='pull')
        # collect bookmark parts as records instead of applying them directly
        op.modes['bookmarks'] = 'records'
        bundle2.processbundle(pullop.repo, bundle, op=op)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
        raise error.Abort(_('pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_('missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records['listkeys']:
        if namespace == 'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        books = {}
        for record in op.records['bookmarks']:
            books[record['bookmark']] = record["node"]
        pullop.remotebookmarks = books
    else:
        for namespace, value in op.records['listkeys']:
            if namespace == 'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
1786
1794
1787 def _pullbundle2extraprepare(pullop, kwargs):
1795 def _pullbundle2extraprepare(pullop, kwargs):
1788 """hook function so that extensions can extend the getbundle call"""
1796 """hook function so that extensions can extend the getbundle call"""
1789
1797
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Chooses the best wire-protocol command the remote supports
    ('getbundle', 'changegroup', or 'changegroupsubset') and applies the
    resulting bundle. No-op if the bundle2 path already pulled the
    changegroup.
    """
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if 'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add('changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_("no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_("requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable('getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle('pull', common=pullop.common,
                                     heads=pullop.heads or pullop.rheads)
    elif pullop.heads is None:
        # full pull over the legacy 'changegroup' command
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand('changegroup', {
                'nodes': pullop.fetch,
                'source': 'pull',
            }).result()

    elif not pullop.remote.capable('changegroupsubset'):
        raise error.Abort(_("partial pull cannot be done because "
                            "other repository doesn't support "
                            "changegroupsubset."))
    else:
        # partial pull over the legacy 'changegroupsubset' command
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand('changegroupsubset', {
                'bases': pullop.fetch,
                'heads': pullop.heads,
                'source': 'pull',
            }).result()

    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
                                   pullop.remote.url())
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1835
1843
1836 def _pullphase(pullop):
1844 def _pullphase(pullop):
1837 # Get remote phases data from remote
1845 # Get remote phases data from remote
1838 if 'phases' in pullop.stepsdone:
1846 if 'phases' in pullop.stepsdone:
1839 return
1847 return
1840 remotephases = listkeys(pullop.remote, 'phases')
1848 remotephases = listkeys(pullop.remote, 'phases')
1841 _pullapplyphases(pullop, remotephases)
1849 _pullapplyphases(pullop, remotephases)
1842
1850
1843 def _pullapplyphases(pullop, remotephases):
1851 def _pullapplyphases(pullop, remotephases):
1844 """apply phase movement from observed remote state"""
1852 """apply phase movement from observed remote state"""
1845 if 'phases' in pullop.stepsdone:
1853 if 'phases' in pullop.stepsdone:
1846 return
1854 return
1847 pullop.stepsdone.add('phases')
1855 pullop.stepsdone.add('phases')
1848 publishing = bool(remotephases.get('publishing', False))
1856 publishing = bool(remotephases.get('publishing', False))
1849 if remotephases and not publishing:
1857 if remotephases and not publishing:
1850 # remote is new and non-publishing
1858 # remote is new and non-publishing
1851 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1859 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1852 pullop.pulledsubset,
1860 pullop.pulledsubset,
1853 remotephases)
1861 remotephases)
1854 dheads = pullop.pulledsubset
1862 dheads = pullop.pulledsubset
1855 else:
1863 else:
1856 # Remote is old or publishing all common changesets
1864 # Remote is old or publishing all common changesets
1857 # should be seen as public
1865 # should be seen as public
1858 pheads = pullop.pulledsubset
1866 pheads = pullop.pulledsubset
1859 dheads = []
1867 dheads = []
1860 unfi = pullop.repo.unfiltered()
1868 unfi = pullop.repo.unfiltered()
1861 phase = unfi._phasecache.phase
1869 phase = unfi._phasecache.phase
1862 rev = unfi.changelog.nodemap.get
1870 rev = unfi.changelog.nodemap.get
1863 public = phases.public
1871 public = phases.public
1864 draft = phases.draft
1872 draft = phases.draft
1865
1873
1866 # exclude changesets already public locally and update the others
1874 # exclude changesets already public locally and update the others
1867 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1875 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1868 if pheads:
1876 if pheads:
1869 tr = pullop.gettransaction()
1877 tr = pullop.gettransaction()
1870 phases.advanceboundary(pullop.repo, tr, public, pheads)
1878 phases.advanceboundary(pullop.repo, tr, public, pheads)
1871
1879
1872 # exclude changesets already draft locally and update the others
1880 # exclude changesets already draft locally and update the others
1873 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1881 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1874 if dheads:
1882 if dheads:
1875 tr = pullop.gettransaction()
1883 tr = pullop.gettransaction()
1876 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1884 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1877
1885
1878 def _pullbookmarks(pullop):
1886 def _pullbookmarks(pullop):
1879 """process the remote bookmark information to update the local one"""
1887 """process the remote bookmark information to update the local one"""
1880 if 'bookmarks' in pullop.stepsdone:
1888 if 'bookmarks' in pullop.stepsdone:
1881 return
1889 return
1882 pullop.stepsdone.add('bookmarks')
1890 pullop.stepsdone.add('bookmarks')
1883 repo = pullop.repo
1891 repo = pullop.repo
1884 remotebookmarks = pullop.remotebookmarks
1892 remotebookmarks = pullop.remotebookmarks
1885 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1893 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1886 pullop.remote.url(),
1894 pullop.remote.url(),
1887 pullop.gettransaction,
1895 pullop.gettransaction,
1888 explicit=pullop.explicitbookmarks)
1896 explicit=pullop.explicitbookmarks)
1889
1897
1890 def _pullobsolete(pullop):
1898 def _pullobsolete(pullop):
1891 """utility function to pull obsolete markers from a remote
1899 """utility function to pull obsolete markers from a remote
1892
1900
1893 The `gettransaction` is function that return the pull transaction, creating
1901 The `gettransaction` is function that return the pull transaction, creating
1894 one if necessary. We return the transaction to inform the calling code that
1902 one if necessary. We return the transaction to inform the calling code that
1895 a new transaction have been created (when applicable).
1903 a new transaction have been created (when applicable).
1896
1904
1897 Exists mostly to allow overriding for experimentation purpose"""
1905 Exists mostly to allow overriding for experimentation purpose"""
1898 if 'obsmarkers' in pullop.stepsdone:
1906 if 'obsmarkers' in pullop.stepsdone:
1899 return
1907 return
1900 pullop.stepsdone.add('obsmarkers')
1908 pullop.stepsdone.add('obsmarkers')
1901 tr = None
1909 tr = None
1902 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1910 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1903 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1911 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1904 remoteobs = listkeys(pullop.remote, 'obsolete')
1912 remoteobs = listkeys(pullop.remote, 'obsolete')
1905 if 'dump0' in remoteobs:
1913 if 'dump0' in remoteobs:
1906 tr = pullop.gettransaction()
1914 tr = pullop.gettransaction()
1907 markers = []
1915 markers = []
1908 for key in sorted(remoteobs, reverse=True):
1916 for key in sorted(remoteobs, reverse=True):
1909 if key.startswith('dump'):
1917 if key.startswith('dump'):
1910 data = util.b85decode(remoteobs[key])
1918 data = util.b85decode(remoteobs[key])
1911 version, newmarks = obsolete._readmarkers(data)
1919 version, newmarks = obsolete._readmarkers(data)
1912 markers += newmarks
1920 markers += newmarks
1913 if markers:
1921 if markers:
1914 pullop.repo.obsstore.add(tr, markers)
1922 pullop.repo.obsstore.add(tr, markers)
1915 pullop.repo.invalidatevolatilesets()
1923 pullop.repo.invalidatevolatilesets()
1916 return tr
1924 return tr
1917
1925
1918 def applynarrowacl(repo, kwargs):
1926 def applynarrowacl(repo, kwargs):
1919 """Apply narrow fetch access control.
1927 """Apply narrow fetch access control.
1920
1928
1921 This massages the named arguments for getbundle wire protocol commands
1929 This massages the named arguments for getbundle wire protocol commands
1922 so requested data is filtered through access control rules.
1930 so requested data is filtered through access control rules.
1923 """
1931 """
1924 ui = repo.ui
1932 ui = repo.ui
1925 # TODO this assumes existence of HTTP and is a layering violation.
1933 # TODO this assumes existence of HTTP and is a layering violation.
1926 username = ui.shortuser(ui.environ.get('REMOTE_USER') or ui.username())
1934 username = ui.shortuser(ui.environ.get('REMOTE_USER') or ui.username())
1927 user_includes = ui.configlist(
1935 user_includes = ui.configlist(
1928 _NARROWACL_SECTION, username + '.includes',
1936 _NARROWACL_SECTION, username + '.includes',
1929 ui.configlist(_NARROWACL_SECTION, 'default.includes'))
1937 ui.configlist(_NARROWACL_SECTION, 'default.includes'))
1930 user_excludes = ui.configlist(
1938 user_excludes = ui.configlist(
1931 _NARROWACL_SECTION, username + '.excludes',
1939 _NARROWACL_SECTION, username + '.excludes',
1932 ui.configlist(_NARROWACL_SECTION, 'default.excludes'))
1940 ui.configlist(_NARROWACL_SECTION, 'default.excludes'))
1933 if not user_includes:
1941 if not user_includes:
1934 raise error.Abort(_("{} configuration for user {} is empty")
1942 raise error.Abort(_("{} configuration for user {} is empty")
1935 .format(_NARROWACL_SECTION, username))
1943 .format(_NARROWACL_SECTION, username))
1936
1944
1937 user_includes = [
1945 user_includes = [
1938 'path:.' if p == '*' else 'path:' + p for p in user_includes]
1946 'path:.' if p == '*' else 'path:' + p for p in user_includes]
1939 user_excludes = [
1947 user_excludes = [
1940 'path:.' if p == '*' else 'path:' + p for p in user_excludes]
1948 'path:.' if p == '*' else 'path:' + p for p in user_excludes]
1941
1949
1942 req_includes = set(kwargs.get(r'includepats', []))
1950 req_includes = set(kwargs.get(r'includepats', []))
1943 req_excludes = set(kwargs.get(r'excludepats', []))
1951 req_excludes = set(kwargs.get(r'excludepats', []))
1944
1952
1945 req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
1953 req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
1946 req_includes, req_excludes, user_includes, user_excludes)
1954 req_includes, req_excludes, user_includes, user_excludes)
1947
1955
1948 if invalid_includes:
1956 if invalid_includes:
1949 raise error.Abort(
1957 raise error.Abort(
1950 _("The following includes are not accessible for {}: {}")
1958 _("The following includes are not accessible for {}: {}")
1951 .format(username, invalid_includes))
1959 .format(username, invalid_includes))
1952
1960
1953 new_args = {}
1961 new_args = {}
1954 new_args.update(kwargs)
1962 new_args.update(kwargs)
1955 new_args[r'narrow'] = True
1963 new_args[r'narrow'] = True
1956 new_args[r'narrow_acl'] = True
1964 new_args[r'narrow_acl'] = True
1957 new_args[r'includepats'] = req_includes
1965 new_args[r'includepats'] = req_includes
1958 if req_excludes:
1966 if req_excludes:
1959 new_args[r'excludepats'] = req_excludes
1967 new_args[r'excludepats'] = req_excludes
1960
1968
1961 return new_args
1969 return new_args
1962
1970
1963 def _computeellipsis(repo, common, heads, known, match, depth=None):
1971 def _computeellipsis(repo, common, heads, known, match, depth=None):
1964 """Compute the shape of a narrowed DAG.
1972 """Compute the shape of a narrowed DAG.
1965
1973
1966 Args:
1974 Args:
1967 repo: The repository we're transferring.
1975 repo: The repository we're transferring.
1968 common: The roots of the DAG range we're transferring.
1976 common: The roots of the DAG range we're transferring.
1969 May be just [nullid], which means all ancestors of heads.
1977 May be just [nullid], which means all ancestors of heads.
1970 heads: The heads of the DAG range we're transferring.
1978 heads: The heads of the DAG range we're transferring.
1971 match: The narrowmatcher that allows us to identify relevant changes.
1979 match: The narrowmatcher that allows us to identify relevant changes.
1972 depth: If not None, only consider nodes to be full nodes if they are at
1980 depth: If not None, only consider nodes to be full nodes if they are at
1973 most depth changesets away from one of heads.
1981 most depth changesets away from one of heads.
1974
1982
1975 Returns:
1983 Returns:
1976 A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:
1984 A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:
1977
1985
1978 visitnodes: The list of nodes (either full or ellipsis) which
1986 visitnodes: The list of nodes (either full or ellipsis) which
1979 need to be sent to the client.
1987 need to be sent to the client.
1980 relevant_nodes: The set of changelog nodes which change a file inside
1988 relevant_nodes: The set of changelog nodes which change a file inside
1981 the narrowspec. The client needs these as non-ellipsis nodes.
1989 the narrowspec. The client needs these as non-ellipsis nodes.
1982 ellipsisroots: A dict of {rev: parents} that is used in
1990 ellipsisroots: A dict of {rev: parents} that is used in
1983 narrowchangegroup to produce ellipsis nodes with the
1991 narrowchangegroup to produce ellipsis nodes with the
1984 correct parents.
1992 correct parents.
1985 """
1993 """
1986 cl = repo.changelog
1994 cl = repo.changelog
1987 mfl = repo.manifestlog
1995 mfl = repo.manifestlog
1988
1996
1989 clrev = cl.rev
1997 clrev = cl.rev
1990
1998
1991 commonrevs = {clrev(n) for n in common} | {nullrev}
1999 commonrevs = {clrev(n) for n in common} | {nullrev}
1992 headsrevs = {clrev(n) for n in heads}
2000 headsrevs = {clrev(n) for n in heads}
1993
2001
1994 if depth:
2002 if depth:
1995 revdepth = {h: 0 for h in headsrevs}
2003 revdepth = {h: 0 for h in headsrevs}
1996
2004
1997 ellipsisheads = collections.defaultdict(set)
2005 ellipsisheads = collections.defaultdict(set)
1998 ellipsisroots = collections.defaultdict(set)
2006 ellipsisroots = collections.defaultdict(set)
1999
2007
2000 def addroot(head, curchange):
2008 def addroot(head, curchange):
2001 """Add a root to an ellipsis head, splitting heads with 3 roots."""
2009 """Add a root to an ellipsis head, splitting heads with 3 roots."""
2002 ellipsisroots[head].add(curchange)
2010 ellipsisroots[head].add(curchange)
2003 # Recursively split ellipsis heads with 3 roots by finding the
2011 # Recursively split ellipsis heads with 3 roots by finding the
2004 # roots' youngest common descendant which is an elided merge commit.
2012 # roots' youngest common descendant which is an elided merge commit.
2005 # That descendant takes 2 of the 3 roots as its own, and becomes a
2013 # That descendant takes 2 of the 3 roots as its own, and becomes a
2006 # root of the head.
2014 # root of the head.
2007 while len(ellipsisroots[head]) > 2:
2015 while len(ellipsisroots[head]) > 2:
2008 child, roots = splithead(head)
2016 child, roots = splithead(head)
2009 splitroots(head, child, roots)
2017 splitroots(head, child, roots)
2010 head = child # Recurse in case we just added a 3rd root
2018 head = child # Recurse in case we just added a 3rd root
2011
2019
2012 def splitroots(head, child, roots):
2020 def splitroots(head, child, roots):
2013 ellipsisroots[head].difference_update(roots)
2021 ellipsisroots[head].difference_update(roots)
2014 ellipsisroots[head].add(child)
2022 ellipsisroots[head].add(child)
2015 ellipsisroots[child].update(roots)
2023 ellipsisroots[child].update(roots)
2016 ellipsisroots[child].discard(child)
2024 ellipsisroots[child].discard(child)
2017
2025
2018 def splithead(head):
2026 def splithead(head):
2019 r1, r2, r3 = sorted(ellipsisroots[head])
2027 r1, r2, r3 = sorted(ellipsisroots[head])
2020 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
2028 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
2021 mid = repo.revs('sort(merge() & %d::%d & %d::%d, -rev)',
2029 mid = repo.revs('sort(merge() & %d::%d & %d::%d, -rev)',
2022 nr1, head, nr2, head)
2030 nr1, head, nr2, head)
2023 for j in mid:
2031 for j in mid:
2024 if j == nr2:
2032 if j == nr2:
2025 return nr2, (nr1, nr2)
2033 return nr2, (nr1, nr2)
2026 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
2034 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
2027 return j, (nr1, nr2)
2035 return j, (nr1, nr2)
2028 raise error.Abort(_('Failed to split up ellipsis node! head: %d, '
2036 raise error.Abort(_('Failed to split up ellipsis node! head: %d, '
2029 'roots: %d %d %d') % (head, r1, r2, r3))
2037 'roots: %d %d %d') % (head, r1, r2, r3))
2030
2038
2031 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
2039 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
2032 visit = reversed(missing)
2040 visit = reversed(missing)
2033 relevant_nodes = set()
2041 relevant_nodes = set()
2034 visitnodes = [cl.node(m) for m in missing]
2042 visitnodes = [cl.node(m) for m in missing]
2035 required = set(headsrevs) | known
2043 required = set(headsrevs) | known
2036 for rev in visit:
2044 for rev in visit:
2037 clrev = cl.changelogrevision(rev)
2045 clrev = cl.changelogrevision(rev)
2038 ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
2046 ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
2039 if depth is not None:
2047 if depth is not None:
2040 curdepth = revdepth[rev]
2048 curdepth = revdepth[rev]
2041 for p in ps:
2049 for p in ps:
2042 revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
2050 revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
2043 needed = False
2051 needed = False
2044 shallow_enough = depth is None or revdepth[rev] <= depth
2052 shallow_enough = depth is None or revdepth[rev] <= depth
2045 if shallow_enough:
2053 if shallow_enough:
2046 curmf = mfl[clrev.manifest].read()
2054 curmf = mfl[clrev.manifest].read()
2047 if ps:
2055 if ps:
2048 # We choose to not trust the changed files list in
2056 # We choose to not trust the changed files list in
2049 # changesets because it's not always correct. TODO: could
2057 # changesets because it's not always correct. TODO: could
2050 # we trust it for the non-merge case?
2058 # we trust it for the non-merge case?
2051 p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
2059 p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
2052 needed = bool(curmf.diff(p1mf, match))
2060 needed = bool(curmf.diff(p1mf, match))
2053 if not needed and len(ps) > 1:
2061 if not needed and len(ps) > 1:
2054 # For merge changes, the list of changed files is not
2062 # For merge changes, the list of changed files is not
2055 # helpful, since we need to emit the merge if a file
2063 # helpful, since we need to emit the merge if a file
2056 # in the narrow spec has changed on either side of the
2064 # in the narrow spec has changed on either side of the
2057 # merge. As a result, we do a manifest diff to check.
2065 # merge. As a result, we do a manifest diff to check.
2058 p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
2066 p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
2059 needed = bool(curmf.diff(p2mf, match))
2067 needed = bool(curmf.diff(p2mf, match))
2060 else:
2068 else:
2061 # For a root node, we need to include the node if any
2069 # For a root node, we need to include the node if any
2062 # files in the node match the narrowspec.
2070 # files in the node match the narrowspec.
2063 needed = any(curmf.walk(match))
2071 needed = any(curmf.walk(match))
2064
2072
2065 if needed:
2073 if needed:
2066 for head in ellipsisheads[rev]:
2074 for head in ellipsisheads[rev]:
2067 addroot(head, rev)
2075 addroot(head, rev)
2068 for p in ps:
2076 for p in ps:
2069 required.add(p)
2077 required.add(p)
2070 relevant_nodes.add(cl.node(rev))
2078 relevant_nodes.add(cl.node(rev))
2071 else:
2079 else:
2072 if not ps:
2080 if not ps:
2073 ps = [nullrev]
2081 ps = [nullrev]
2074 if rev in required:
2082 if rev in required:
2075 for head in ellipsisheads[rev]:
2083 for head in ellipsisheads[rev]:
2076 addroot(head, rev)
2084 addroot(head, rev)
2077 for p in ps:
2085 for p in ps:
2078 ellipsisheads[p].add(rev)
2086 ellipsisheads[p].add(rev)
2079 else:
2087 else:
2080 for p in ps:
2088 for p in ps:
2081 ellipsisheads[p] |= ellipsisheads[rev]
2089 ellipsisheads[p] |= ellipsisheads[rev]
2082
2090
2083 # add common changesets as roots of their reachable ellipsis heads
2091 # add common changesets as roots of their reachable ellipsis heads
2084 for c in commonrevs:
2092 for c in commonrevs:
2085 for head in ellipsisheads[c]:
2093 for head in ellipsisheads[c]:
2086 addroot(head, c)
2094 addroot(head, c)
2087 return visitnodes, relevant_nodes, ellipsisroots
2095 return visitnodes, relevant_nodes, ellipsisroots
2088
2096
2089 def caps20to10(repo, role):
2097 def caps20to10(repo, role):
2090 """return a set with appropriate options to use bundle20 during getbundle"""
2098 """return a set with appropriate options to use bundle20 during getbundle"""
2091 caps = {'HG20'}
2099 caps = {'HG20'}
2092 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
2100 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
2093 caps.add('bundle2=' + urlreq.quote(capsblob))
2101 caps.add('bundle2=' + urlreq.quote(capsblob))
2094 return caps
2102 return caps
2095
2103
2096 # List of names of steps to perform for a bundle2 for getbundle, order matters.
2104 # List of names of steps to perform for a bundle2 for getbundle, order matters.
2097 getbundle2partsorder = []
2105 getbundle2partsorder = []
2098
2106
2099 # Mapping between step name and function
2107 # Mapping between step name and function
2100 #
2108 #
2101 # This exists to help extensions wrap steps if necessary
2109 # This exists to help extensions wrap steps if necessary
2102 getbundle2partsmapping = {}
2110 getbundle2partsmapping = {}
2103
2111
2104 def getbundle2partsgenerator(stepname, idx=None):
2112 def getbundle2partsgenerator(stepname, idx=None):
2105 """decorator for function generating bundle2 part for getbundle
2113 """decorator for function generating bundle2 part for getbundle
2106
2114
2107 The function is added to the step -> function mapping and appended to the
2115 The function is added to the step -> function mapping and appended to the
2108 list of steps. Beware that decorated functions will be added in order
2116 list of steps. Beware that decorated functions will be added in order
2109 (this may matter).
2117 (this may matter).
2110
2118
2111 You can only use this decorator for new steps, if you want to wrap a step
2119 You can only use this decorator for new steps, if you want to wrap a step
2112 from an extension, attack the getbundle2partsmapping dictionary directly."""
2120 from an extension, attack the getbundle2partsmapping dictionary directly."""
2113 def dec(func):
2121 def dec(func):
2114 assert stepname not in getbundle2partsmapping
2122 assert stepname not in getbundle2partsmapping
2115 getbundle2partsmapping[stepname] = func
2123 getbundle2partsmapping[stepname] = func
2116 if idx is None:
2124 if idx is None:
2117 getbundle2partsorder.append(stepname)
2125 getbundle2partsorder.append(stepname)
2118 else:
2126 else:
2119 getbundle2partsorder.insert(idx, stepname)
2127 getbundle2partsorder.insert(idx, stepname)
2120 return func
2128 return func
2121 return dec
2129 return dec
2122
2130
2123 def bundle2requested(bundlecaps):
2131 def bundle2requested(bundlecaps):
2124 if bundlecaps is not None:
2132 if bundlecaps is not None:
2125 return any(cap.startswith('HG2') for cap in bundlecaps)
2133 return any(cap.startswith('HG2') for cap in bundlecaps)
2126 return False
2134 return False
2127
2135
2128 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
2136 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
2129 **kwargs):
2137 **kwargs):
2130 """Return chunks constituting a bundle's raw data.
2138 """Return chunks constituting a bundle's raw data.
2131
2139
2132 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
2140 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
2133 passed.
2141 passed.
2134
2142
2135 Returns a 2-tuple of a dict with metadata about the generated bundle
2143 Returns a 2-tuple of a dict with metadata about the generated bundle
2136 and an iterator over raw chunks (of varying sizes).
2144 and an iterator over raw chunks (of varying sizes).
2137 """
2145 """
2138 kwargs = pycompat.byteskwargs(kwargs)
2146 kwargs = pycompat.byteskwargs(kwargs)
2139 info = {}
2147 info = {}
2140 usebundle2 = bundle2requested(bundlecaps)
2148 usebundle2 = bundle2requested(bundlecaps)
2141 # bundle10 case
2149 # bundle10 case
2142 if not usebundle2:
2150 if not usebundle2:
2143 if bundlecaps and not kwargs.get('cg', True):
2151 if bundlecaps and not kwargs.get('cg', True):
2144 raise ValueError(_('request for bundle10 must include changegroup'))
2152 raise ValueError(_('request for bundle10 must include changegroup'))
2145
2153
2146 if kwargs:
2154 if kwargs:
2147 raise ValueError(_('unsupported getbundle arguments: %s')
2155 raise ValueError(_('unsupported getbundle arguments: %s')
2148 % ', '.join(sorted(kwargs.keys())))
2156 % ', '.join(sorted(kwargs.keys())))
2149 outgoing = _computeoutgoing(repo, heads, common)
2157 outgoing = _computeoutgoing(repo, heads, common)
2150 info['bundleversion'] = 1
2158 info['bundleversion'] = 1
2151 return info, changegroup.makestream(repo, outgoing, '01', source,
2159 return info, changegroup.makestream(repo, outgoing, '01', source,
2152 bundlecaps=bundlecaps)
2160 bundlecaps=bundlecaps)
2153
2161
2154 # bundle20 case
2162 # bundle20 case
2155 info['bundleversion'] = 2
2163 info['bundleversion'] = 2
2156 b2caps = {}
2164 b2caps = {}
2157 for bcaps in bundlecaps:
2165 for bcaps in bundlecaps:
2158 if bcaps.startswith('bundle2='):
2166 if bcaps.startswith('bundle2='):
2159 blob = urlreq.unquote(bcaps[len('bundle2='):])
2167 blob = urlreq.unquote(bcaps[len('bundle2='):])
2160 b2caps.update(bundle2.decodecaps(blob))
2168 b2caps.update(bundle2.decodecaps(blob))
2161 bundler = bundle2.bundle20(repo.ui, b2caps)
2169 bundler = bundle2.bundle20(repo.ui, b2caps)
2162
2170
2163 kwargs['heads'] = heads
2171 kwargs['heads'] = heads
2164 kwargs['common'] = common
2172 kwargs['common'] = common
2165
2173
2166 for name in getbundle2partsorder:
2174 for name in getbundle2partsorder:
2167 func = getbundle2partsmapping[name]
2175 func = getbundle2partsmapping[name]
2168 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
2176 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
2169 **pycompat.strkwargs(kwargs))
2177 **pycompat.strkwargs(kwargs))
2170
2178
2171 info['prefercompressed'] = bundler.prefercompressed
2179 info['prefercompressed'] = bundler.prefercompressed
2172
2180
2173 return info, bundler.getchunks()
2181 return info, bundler.getchunks()
2174
2182
2175 @getbundle2partsgenerator('stream2')
2183 @getbundle2partsgenerator('stream2')
2176 def _getbundlestream2(bundler, repo, *args, **kwargs):
2184 def _getbundlestream2(bundler, repo, *args, **kwargs):
2177 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2185 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2178
2186
2179 @getbundle2partsgenerator('changegroup')
2187 @getbundle2partsgenerator('changegroup')
2180 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
2188 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
2181 b2caps=None, heads=None, common=None, **kwargs):
2189 b2caps=None, heads=None, common=None, **kwargs):
2182 """add a changegroup part to the requested bundle"""
2190 """add a changegroup part to the requested bundle"""
2183 if not kwargs.get(r'cg', True):
2191 if not kwargs.get(r'cg', True):
2184 return
2192 return
2185
2193
2186 version = '01'
2194 version = '01'
2187 cgversions = b2caps.get('changegroup')
2195 cgversions = b2caps.get('changegroup')
2188 if cgversions: # 3.1 and 3.2 ship with an empty value
2196 if cgversions: # 3.1 and 3.2 ship with an empty value
2189 cgversions = [v for v in cgversions
2197 cgversions = [v for v in cgversions
2190 if v in changegroup.supportedoutgoingversions(repo)]
2198 if v in changegroup.supportedoutgoingversions(repo)]
2191 if not cgversions:
2199 if not cgversions:
2192 raise error.Abort(_('no common changegroup version'))
2200 raise error.Abort(_('no common changegroup version'))
2193 version = max(cgversions)
2201 version = max(cgversions)
2194
2202
2195 outgoing = _computeoutgoing(repo, heads, common)
2203 outgoing = _computeoutgoing(repo, heads, common)
2196 if not outgoing.missing:
2204 if not outgoing.missing:
2197 return
2205 return
2198
2206
2199 if kwargs.get(r'narrow', False):
2207 if kwargs.get(r'narrow', False):
2200 include = sorted(filter(bool, kwargs.get(r'includepats', [])))
2208 include = sorted(filter(bool, kwargs.get(r'includepats', [])))
2201 exclude = sorted(filter(bool, kwargs.get(r'excludepats', [])))
2209 exclude = sorted(filter(bool, kwargs.get(r'excludepats', [])))
2202 matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
2210 matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
2203 else:
2211 else:
2204 matcher = None
2212 matcher = None
2205
2213
2206 cgstream = changegroup.makestream(repo, outgoing, version, source,
2214 cgstream = changegroup.makestream(repo, outgoing, version, source,
2207 bundlecaps=bundlecaps, matcher=matcher)
2215 bundlecaps=bundlecaps, matcher=matcher)
2208
2216
2209 part = bundler.newpart('changegroup', data=cgstream)
2217 part = bundler.newpart('changegroup', data=cgstream)
2210 if cgversions:
2218 if cgversions:
2211 part.addparam('version', version)
2219 part.addparam('version', version)
2212
2220
2213 part.addparam('nbchanges', '%d' % len(outgoing.missing),
2221 part.addparam('nbchanges', '%d' % len(outgoing.missing),
2214 mandatory=False)
2222 mandatory=False)
2215
2223
2216 if 'treemanifest' in repo.requirements:
2224 if 'treemanifest' in repo.requirements:
2217 part.addparam('treemanifest', '1')
2225 part.addparam('treemanifest', '1')
2218
2226
2219 if (kwargs.get(r'narrow', False) and kwargs.get(r'narrow_acl', False)
2227 if (kwargs.get(r'narrow', False) and kwargs.get(r'narrow_acl', False)
2220 and (include or exclude)):
2228 and (include or exclude)):
2221 # this is mandatory because otherwise ACL clients won't work
2229 # this is mandatory because otherwise ACL clients won't work
2222 narrowspecpart = bundler.newpart('Narrow:responsespec')
2230 narrowspecpart = bundler.newpart('Narrow:responsespec')
2223 narrowspecpart.data = '%s\0%s' % ('\n'.join(include),
2231 narrowspecpart.data = '%s\0%s' % ('\n'.join(include),
2224 '\n'.join(exclude))
2232 '\n'.join(exclude))
2225
2233
2226 @getbundle2partsgenerator('bookmarks')
2234 @getbundle2partsgenerator('bookmarks')
2227 def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
2235 def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
2228 b2caps=None, **kwargs):
2236 b2caps=None, **kwargs):
2229 """add a bookmark part to the requested bundle"""
2237 """add a bookmark part to the requested bundle"""
2230 if not kwargs.get(r'bookmarks', False):
2238 if not kwargs.get(r'bookmarks', False):
2231 return
2239 return
2232 if 'bookmarks' not in b2caps:
2240 if 'bookmarks' not in b2caps:
2233 raise error.Abort(_('no common bookmarks exchange method'))
2241 raise error.Abort(_('no common bookmarks exchange method'))
2234 books = bookmod.listbinbookmarks(repo)
2242 books = bookmod.listbinbookmarks(repo)
2235 data = bookmod.binaryencode(books)
2243 data = bookmod.binaryencode(books)
2236 if data:
2244 if data:
2237 bundler.newpart('bookmarks', data=data)
2245 bundler.newpart('bookmarks', data=data)
2238
2246
2239 @getbundle2partsgenerator('listkeys')
2247 @getbundle2partsgenerator('listkeys')
2240 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
2248 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
2241 b2caps=None, **kwargs):
2249 b2caps=None, **kwargs):
2242 """add parts containing listkeys namespaces to the requested bundle"""
2250 """add parts containing listkeys namespaces to the requested bundle"""
2243 listkeys = kwargs.get(r'listkeys', ())
2251 listkeys = kwargs.get(r'listkeys', ())
2244 for namespace in listkeys:
2252 for namespace in listkeys:
2245 part = bundler.newpart('listkeys')
2253 part = bundler.newpart('listkeys')
2246 part.addparam('namespace', namespace)
2254 part.addparam('namespace', namespace)
2247 keys = repo.listkeys(namespace).items()
2255 keys = repo.listkeys(namespace).items()
2248 part.data = pushkey.encodekeys(keys)
2256 part.data = pushkey.encodekeys(keys)
2249
2257
2250 @getbundle2partsgenerator('obsmarkers')
2258 @getbundle2partsgenerator('obsmarkers')
2251 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
2259 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
2252 b2caps=None, heads=None, **kwargs):
2260 b2caps=None, heads=None, **kwargs):
2253 """add an obsolescence markers part to the requested bundle"""
2261 """add an obsolescence markers part to the requested bundle"""
2254 if kwargs.get(r'obsmarkers', False):
2262 if kwargs.get(r'obsmarkers', False):
2255 if heads is None:
2263 if heads is None:
2256 heads = repo.heads()
2264 heads = repo.heads()
2257 subset = [c.node() for c in repo.set('::%ln', heads)]
2265 subset = [c.node() for c in repo.set('::%ln', heads)]
2258 markers = repo.obsstore.relevantmarkers(subset)
2266 markers = repo.obsstore.relevantmarkers(subset)
2259 markers = sorted(markers)
2267 markers = sorted(markers)
2260 bundle2.buildobsmarkerspart(bundler, markers)
2268 bundle2.buildobsmarkerspart(bundler, markers)
2261
2269
2262 @getbundle2partsgenerator('phases')
2270 @getbundle2partsgenerator('phases')
2263 def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
2271 def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
2264 b2caps=None, heads=None, **kwargs):
2272 b2caps=None, heads=None, **kwargs):
2265 """add phase heads part to the requested bundle"""
2273 """add phase heads part to the requested bundle"""
2266 if kwargs.get(r'phases', False):
2274 if kwargs.get(r'phases', False):
2267 if not 'heads' in b2caps.get('phases'):
2275 if not 'heads' in b2caps.get('phases'):
2268 raise error.Abort(_('no common phases exchange method'))
2276 raise error.Abort(_('no common phases exchange method'))
2269 if heads is None:
2277 if heads is None:
2270 heads = repo.heads()
2278 heads = repo.heads()
2271
2279
2272 headsbyphase = collections.defaultdict(set)
2280 headsbyphase = collections.defaultdict(set)
2273 if repo.publishing():
2281 if repo.publishing():
2274 headsbyphase[phases.public] = heads
2282 headsbyphase[phases.public] = heads
2275 else:
2283 else:
2276 # find the appropriate heads to move
2284 # find the appropriate heads to move
2277
2285
2278 phase = repo._phasecache.phase
2286 phase = repo._phasecache.phase
2279 node = repo.changelog.node
2287 node = repo.changelog.node
2280 rev = repo.changelog.rev
2288 rev = repo.changelog.rev
2281 for h in heads:
2289 for h in heads:
2282 headsbyphase[phase(repo, rev(h))].add(h)
2290 headsbyphase[phase(repo, rev(h))].add(h)
2283 seenphases = list(headsbyphase.keys())
2291 seenphases = list(headsbyphase.keys())
2284
2292
2285 # We do not handle anything but public and draft phase for now)
2293 # We do not handle anything but public and draft phase for now)
2286 if seenphases:
2294 if seenphases:
2287 assert max(seenphases) <= phases.draft
2295 assert max(seenphases) <= phases.draft
2288
2296
2289 # if client is pulling non-public changesets, we need to find
2297 # if client is pulling non-public changesets, we need to find
2290 # intermediate public heads.
2298 # intermediate public heads.
2291 draftheads = headsbyphase.get(phases.draft, set())
2299 draftheads = headsbyphase.get(phases.draft, set())
2292 if draftheads:
2300 if draftheads:
2293 publicheads = headsbyphase.get(phases.public, set())
2301 publicheads = headsbyphase.get(phases.public, set())
2294
2302
2295 revset = 'heads(only(%ln, %ln) and public())'
2303 revset = 'heads(only(%ln, %ln) and public())'
2296 extraheads = repo.revs(revset, draftheads, publicheads)
2304 extraheads = repo.revs(revset, draftheads, publicheads)
2297 for r in extraheads:
2305 for r in extraheads:
2298 headsbyphase[phases.public].add(node(r))
2306 headsbyphase[phases.public].add(node(r))
2299
2307
2300 # transform data in a format used by the encoding function
2308 # transform data in a format used by the encoding function
2301 phasemapping = []
2309 phasemapping = []
2302 for phase in phases.allphases:
2310 for phase in phases.allphases:
2303 phasemapping.append(sorted(headsbyphase[phase]))
2311 phasemapping.append(sorted(headsbyphase[phase]))
2304
2312
2305 # generate the actual part
2313 # generate the actual part
2306 phasedata = phases.binaryencode(phasemapping)
2314 phasedata = phases.binaryencode(phasemapping)
2307 bundler.newpart('phase-heads', data=phasedata)
2315 bundler.newpart('phase-heads', data=phasedata)
2308
2316
2309 @getbundle2partsgenerator('hgtagsfnodes')
2317 @getbundle2partsgenerator('hgtagsfnodes')
2310 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
2318 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
2311 b2caps=None, heads=None, common=None,
2319 b2caps=None, heads=None, common=None,
2312 **kwargs):
2320 **kwargs):
2313 """Transfer the .hgtags filenodes mapping.
2321 """Transfer the .hgtags filenodes mapping.
2314
2322
2315 Only values for heads in this bundle will be transferred.
2323 Only values for heads in this bundle will be transferred.
2316
2324
2317 The part data consists of pairs of 20 byte changeset node and .hgtags
2325 The part data consists of pairs of 20 byte changeset node and .hgtags
2318 filenodes raw values.
2326 filenodes raw values.
2319 """
2327 """
2320 # Don't send unless:
2328 # Don't send unless:
2321 # - changeset are being exchanged,
2329 # - changeset are being exchanged,
2322 # - the client supports it.
2330 # - the client supports it.
2323 if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
2331 if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
2324 return
2332 return
2325
2333
2326 outgoing = _computeoutgoing(repo, heads, common)
2334 outgoing = _computeoutgoing(repo, heads, common)
2327 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2335 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2328
2336
2329 @getbundle2partsgenerator('cache:rev-branch-cache')
2337 @getbundle2partsgenerator('cache:rev-branch-cache')
2330 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
2338 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
2331 b2caps=None, heads=None, common=None,
2339 b2caps=None, heads=None, common=None,
2332 **kwargs):
2340 **kwargs):
2333 """Transfer the rev-branch-cache mapping
2341 """Transfer the rev-branch-cache mapping
2334
2342
2335 The payload is a series of data related to each branch
2343 The payload is a series of data related to each branch
2336
2344
2337 1) branch name length
2345 1) branch name length
2338 2) number of open heads
2346 2) number of open heads
2339 3) number of closed heads
2347 3) number of closed heads
2340 4) open heads nodes
2348 4) open heads nodes
2341 5) closed heads nodes
2349 5) closed heads nodes
2342 """
2350 """
2343 # Don't send unless:
2351 # Don't send unless:
2344 # - changeset are being exchanged,
2352 # - changeset are being exchanged,
2345 # - the client supports it.
2353 # - the client supports it.
2346 # - narrow bundle isn't in play (not currently compatible).
2354 # - narrow bundle isn't in play (not currently compatible).
2347 if (not kwargs.get(r'cg', True)
2355 if (not kwargs.get(r'cg', True)
2348 or 'rev-branch-cache' not in b2caps
2356 or 'rev-branch-cache' not in b2caps
2349 or kwargs.get(r'narrow', False)
2357 or kwargs.get(r'narrow', False)
2350 or repo.ui.has_section(_NARROWACL_SECTION)):
2358 or repo.ui.has_section(_NARROWACL_SECTION)):
2351 return
2359 return
2352
2360
2353 outgoing = _computeoutgoing(repo, heads, common)
2361 outgoing = _computeoutgoing(repo, heads, common)
2354 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2362 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2355
2363
2356 def check_heads(repo, their_heads, context):
2364 def check_heads(repo, their_heads, context):
2357 """check if the heads of a repo have been modified
2365 """check if the heads of a repo have been modified
2358
2366
2359 Used by peer for unbundling.
2367 Used by peer for unbundling.
2360 """
2368 """
2361 heads = repo.heads()
2369 heads = repo.heads()
2362 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
2370 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
2363 if not (their_heads == ['force'] or their_heads == heads or
2371 if not (their_heads == ['force'] or their_heads == heads or
2364 their_heads == ['hashed', heads_hash]):
2372 their_heads == ['hashed', heads_hash]):
2365 # someone else committed/pushed/unbundled while we
2373 # someone else committed/pushed/unbundled while we
2366 # were transferring data
2374 # were transferring data
2367 raise error.PushRaced('repository changed while %s - '
2375 raise error.PushRaced('repository changed while %s - '
2368 'please try again' % context)
2376 'please try again' % context)
2369
2377
2370 def unbundle(repo, cg, heads, source, url):
2378 def unbundle(repo, cg, heads, source, url):
2371 """Apply a bundle to a repo.
2379 """Apply a bundle to a repo.
2372
2380
2373 this function makes sure the repo is locked during the application and have
2381 this function makes sure the repo is locked during the application and have
2374 mechanism to check that no push race occurred between the creation of the
2382 mechanism to check that no push race occurred between the creation of the
2375 bundle and its application.
2383 bundle and its application.
2376
2384
2377 If the push was raced as PushRaced exception is raised."""
2385 If the push was raced as PushRaced exception is raised."""
2378 r = 0
2386 r = 0
2379 # need a transaction when processing a bundle2 stream
2387 # need a transaction when processing a bundle2 stream
2380 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2388 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2381 lockandtr = [None, None, None]
2389 lockandtr = [None, None, None]
2382 recordout = None
2390 recordout = None
2383 # quick fix for output mismatch with bundle2 in 3.4
2391 # quick fix for output mismatch with bundle2 in 3.4
2384 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
2392 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
2385 if url.startswith('remote:http:') or url.startswith('remote:https:'):
2393 if url.startswith('remote:http:') or url.startswith('remote:https:'):
2386 captureoutput = True
2394 captureoutput = True
2387 try:
2395 try:
2388 # note: outside bundle1, 'heads' is expected to be empty and this
2396 # note: outside bundle1, 'heads' is expected to be empty and this
2389 # 'check_heads' call wil be a no-op
2397 # 'check_heads' call wil be a no-op
2390 check_heads(repo, heads, 'uploading changes')
2398 check_heads(repo, heads, 'uploading changes')
2391 # push can proceed
2399 # push can proceed
2392 if not isinstance(cg, bundle2.unbundle20):
2400 if not isinstance(cg, bundle2.unbundle20):
2393 # legacy case: bundle1 (changegroup 01)
2401 # legacy case: bundle1 (changegroup 01)
2394 txnname = "\n".join([source, util.hidepassword(url)])
2402 txnname = "\n".join([source, util.hidepassword(url)])
2395 with repo.lock(), repo.transaction(txnname) as tr:
2403 with repo.lock(), repo.transaction(txnname) as tr:
2396 op = bundle2.applybundle(repo, cg, tr, source, url)
2404 op = bundle2.applybundle(repo, cg, tr, source, url)
2397 r = bundle2.combinechangegroupresults(op)
2405 r = bundle2.combinechangegroupresults(op)
2398 else:
2406 else:
2399 r = None
2407 r = None
2400 try:
2408 try:
2401 def gettransaction():
2409 def gettransaction():
2402 if not lockandtr[2]:
2410 if not lockandtr[2]:
2403 if not bookmod.bookmarksinstore(repo):
2411 if not bookmod.bookmarksinstore(repo):
2404 lockandtr[0] = repo.wlock()
2412 lockandtr[0] = repo.wlock()
2405 lockandtr[1] = repo.lock()
2413 lockandtr[1] = repo.lock()
2406 lockandtr[2] = repo.transaction(source)
2414 lockandtr[2] = repo.transaction(source)
2407 lockandtr[2].hookargs['source'] = source
2415 lockandtr[2].hookargs['source'] = source
2408 lockandtr[2].hookargs['url'] = url
2416 lockandtr[2].hookargs['url'] = url
2409 lockandtr[2].hookargs['bundle2'] = '1'
2417 lockandtr[2].hookargs['bundle2'] = '1'
2410 return lockandtr[2]
2418 return lockandtr[2]
2411
2419
2412 # Do greedy locking by default until we're satisfied with lazy
2420 # Do greedy locking by default until we're satisfied with lazy
2413 # locking.
2421 # locking.
2414 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2422 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2415 gettransaction()
2423 gettransaction()
2416
2424
2417 op = bundle2.bundleoperation(repo, gettransaction,
2425 op = bundle2.bundleoperation(repo, gettransaction,
2418 captureoutput=captureoutput,
2426 captureoutput=captureoutput,
2419 source='push')
2427 source='push')
2420 try:
2428 try:
2421 op = bundle2.processbundle(repo, cg, op=op)
2429 op = bundle2.processbundle(repo, cg, op=op)
2422 finally:
2430 finally:
2423 r = op.reply
2431 r = op.reply
2424 if captureoutput and r is not None:
2432 if captureoutput and r is not None:
2425 repo.ui.pushbuffer(error=True, subproc=True)
2433 repo.ui.pushbuffer(error=True, subproc=True)
2426 def recordout(output):
2434 def recordout(output):
2427 r.newpart('output', data=output, mandatory=False)
2435 r.newpart('output', data=output, mandatory=False)
2428 if lockandtr[2] is not None:
2436 if lockandtr[2] is not None:
2429 lockandtr[2].close()
2437 lockandtr[2].close()
2430 except BaseException as exc:
2438 except BaseException as exc:
2431 exc.duringunbundle2 = True
2439 exc.duringunbundle2 = True
2432 if captureoutput and r is not None:
2440 if captureoutput and r is not None:
2433 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2441 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2434 def recordout(output):
2442 def recordout(output):
2435 part = bundle2.bundlepart('output', data=output,
2443 part = bundle2.bundlepart('output', data=output,
2436 mandatory=False)
2444 mandatory=False)
2437 parts.append(part)
2445 parts.append(part)
2438 raise
2446 raise
2439 finally:
2447 finally:
2440 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2448 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2441 if recordout is not None:
2449 if recordout is not None:
2442 recordout(repo.ui.popbuffer())
2450 recordout(repo.ui.popbuffer())
2443 return r
2451 return r
2444
2452
2445 def _maybeapplyclonebundle(pullop):
2453 def _maybeapplyclonebundle(pullop):
2446 """Apply a clone bundle from a remote, if possible."""
2454 """Apply a clone bundle from a remote, if possible."""
2447
2455
2448 repo = pullop.repo
2456 repo = pullop.repo
2449 remote = pullop.remote
2457 remote = pullop.remote
2450
2458
2451 if not repo.ui.configbool('ui', 'clonebundles'):
2459 if not repo.ui.configbool('ui', 'clonebundles'):
2452 return
2460 return
2453
2461
2454 # Only run if local repo is empty.
2462 # Only run if local repo is empty.
2455 if len(repo):
2463 if len(repo):
2456 return
2464 return
2457
2465
2458 if pullop.heads:
2466 if pullop.heads:
2459 return
2467 return
2460
2468
2461 if not remote.capable('clonebundles'):
2469 if not remote.capable('clonebundles'):
2462 return
2470 return
2463
2471
2464 with remote.commandexecutor() as e:
2472 with remote.commandexecutor() as e:
2465 res = e.callcommand('clonebundles', {}).result()
2473 res = e.callcommand('clonebundles', {}).result()
2466
2474
2467 # If we call the wire protocol command, that's good enough to record the
2475 # If we call the wire protocol command, that's good enough to record the
2468 # attempt.
2476 # attempt.
2469 pullop.clonebundleattempted = True
2477 pullop.clonebundleattempted = True
2470
2478
2471 entries = parseclonebundlesmanifest(repo, res)
2479 entries = parseclonebundlesmanifest(repo, res)
2472 if not entries:
2480 if not entries:
2473 repo.ui.note(_('no clone bundles available on remote; '
2481 repo.ui.note(_('no clone bundles available on remote; '
2474 'falling back to regular clone\n'))
2482 'falling back to regular clone\n'))
2475 return
2483 return
2476
2484
2477 entries = filterclonebundleentries(
2485 entries = filterclonebundleentries(
2478 repo, entries, streamclonerequested=pullop.streamclonerequested)
2486 repo, entries, streamclonerequested=pullop.streamclonerequested)
2479
2487
2480 if not entries:
2488 if not entries:
2481 # There is a thundering herd concern here. However, if a server
2489 # There is a thundering herd concern here. However, if a server
2482 # operator doesn't advertise bundles appropriate for its clients,
2490 # operator doesn't advertise bundles appropriate for its clients,
2483 # they deserve what's coming. Furthermore, from a client's
2491 # they deserve what's coming. Furthermore, from a client's
2484 # perspective, no automatic fallback would mean not being able to
2492 # perspective, no automatic fallback would mean not being able to
2485 # clone!
2493 # clone!
2486 repo.ui.warn(_('no compatible clone bundles available on server; '
2494 repo.ui.warn(_('no compatible clone bundles available on server; '
2487 'falling back to regular clone\n'))
2495 'falling back to regular clone\n'))
2488 repo.ui.warn(_('(you may want to report this to the server '
2496 repo.ui.warn(_('(you may want to report this to the server '
2489 'operator)\n'))
2497 'operator)\n'))
2490 return
2498 return
2491
2499
2492 entries = sortclonebundleentries(repo.ui, entries)
2500 entries = sortclonebundleentries(repo.ui, entries)
2493
2501
2494 url = entries[0]['URL']
2502 url = entries[0]['URL']
2495 repo.ui.status(_('applying clone bundle from %s\n') % url)
2503 repo.ui.status(_('applying clone bundle from %s\n') % url)
2496 if trypullbundlefromurl(repo.ui, repo, url):
2504 if trypullbundlefromurl(repo.ui, repo, url):
2497 repo.ui.status(_('finished applying clone bundle\n'))
2505 repo.ui.status(_('finished applying clone bundle\n'))
2498 # Bundle failed.
2506 # Bundle failed.
2499 #
2507 #
2500 # We abort by default to avoid the thundering herd of
2508 # We abort by default to avoid the thundering herd of
2501 # clients flooding a server that was expecting expensive
2509 # clients flooding a server that was expecting expensive
2502 # clone load to be offloaded.
2510 # clone load to be offloaded.
2503 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2511 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2504 repo.ui.warn(_('falling back to normal clone\n'))
2512 repo.ui.warn(_('falling back to normal clone\n'))
2505 else:
2513 else:
2506 raise error.Abort(_('error applying bundle'),
2514 raise error.Abort(_('error applying bundle'),
2507 hint=_('if this error persists, consider contacting '
2515 hint=_('if this error persists, consider contacting '
2508 'the server operator or disable clone '
2516 'the server operator or disable clone '
2509 'bundles via '
2517 'bundles via '
2510 '"--config ui.clonebundles=false"'))
2518 '"--config ui.clonebundles=false"'))
2511
2519
2512 def parseclonebundlesmanifest(repo, s):
2520 def parseclonebundlesmanifest(repo, s):
2513 """Parses the raw text of a clone bundles manifest.
2521 """Parses the raw text of a clone bundles manifest.
2514
2522
2515 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2523 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2516 to the URL and other keys are the attributes for the entry.
2524 to the URL and other keys are the attributes for the entry.
2517 """
2525 """
2518 m = []
2526 m = []
2519 for line in s.splitlines():
2527 for line in s.splitlines():
2520 fields = line.split()
2528 fields = line.split()
2521 if not fields:
2529 if not fields:
2522 continue
2530 continue
2523 attrs = {'URL': fields[0]}
2531 attrs = {'URL': fields[0]}
2524 for rawattr in fields[1:]:
2532 for rawattr in fields[1:]:
2525 key, value = rawattr.split('=', 1)
2533 key, value = rawattr.split('=', 1)
2526 key = urlreq.unquote(key)
2534 key = urlreq.unquote(key)
2527 value = urlreq.unquote(value)
2535 value = urlreq.unquote(value)
2528 attrs[key] = value
2536 attrs[key] = value
2529
2537
2530 # Parse BUNDLESPEC into components. This makes client-side
2538 # Parse BUNDLESPEC into components. This makes client-side
2531 # preferences easier to specify since you can prefer a single
2539 # preferences easier to specify since you can prefer a single
2532 # component of the BUNDLESPEC.
2540 # component of the BUNDLESPEC.
2533 if key == 'BUNDLESPEC':
2541 if key == 'BUNDLESPEC':
2534 try:
2542 try:
2535 bundlespec = parsebundlespec(repo, value)
2543 bundlespec = parsebundlespec(repo, value)
2536 attrs['COMPRESSION'] = bundlespec.compression
2544 attrs['COMPRESSION'] = bundlespec.compression
2537 attrs['VERSION'] = bundlespec.version
2545 attrs['VERSION'] = bundlespec.version
2538 except error.InvalidBundleSpecification:
2546 except error.InvalidBundleSpecification:
2539 pass
2547 pass
2540 except error.UnsupportedBundleSpecification:
2548 except error.UnsupportedBundleSpecification:
2541 pass
2549 pass
2542
2550
2543 m.append(attrs)
2551 m.append(attrs)
2544
2552
2545 return m
2553 return m
2546
2554
2547 def isstreamclonespec(bundlespec):
2555 def isstreamclonespec(bundlespec):
2548 # Stream clone v1
2556 # Stream clone v1
2549 if (bundlespec.wirecompression == 'UN' and bundlespec.wireversion == 's1'):
2557 if (bundlespec.wirecompression == 'UN' and bundlespec.wireversion == 's1'):
2550 return True
2558 return True
2551
2559
2552 # Stream clone v2
2560 # Stream clone v2
2553 if (bundlespec.wirecompression == 'UN' and
2561 if (bundlespec.wirecompression == 'UN' and
2554 bundlespec.wireversion == '02' and
2562 bundlespec.wireversion == '02' and
2555 bundlespec.contentopts.get('streamv2')):
2563 bundlespec.contentopts.get('streamv2')):
2556 return True
2564 return True
2557
2565
2558 return False
2566 return False
2559
2567
2560 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2568 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2561 """Remove incompatible clone bundle manifest entries.
2569 """Remove incompatible clone bundle manifest entries.
2562
2570
2563 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2571 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2564 and returns a new list consisting of only the entries that this client
2572 and returns a new list consisting of only the entries that this client
2565 should be able to apply.
2573 should be able to apply.
2566
2574
2567 There is no guarantee we'll be able to apply all returned entries because
2575 There is no guarantee we'll be able to apply all returned entries because
2568 the metadata we use to filter on may be missing or wrong.
2576 the metadata we use to filter on may be missing or wrong.
2569 """
2577 """
2570 newentries = []
2578 newentries = []
2571 for entry in entries:
2579 for entry in entries:
2572 spec = entry.get('BUNDLESPEC')
2580 spec = entry.get('BUNDLESPEC')
2573 if spec:
2581 if spec:
2574 try:
2582 try:
2575 bundlespec = parsebundlespec(repo, spec, strict=True)
2583 bundlespec = parsebundlespec(repo, spec, strict=True)
2576
2584
2577 # If a stream clone was requested, filter out non-streamclone
2585 # If a stream clone was requested, filter out non-streamclone
2578 # entries.
2586 # entries.
2579 if streamclonerequested and not isstreamclonespec(bundlespec):
2587 if streamclonerequested and not isstreamclonespec(bundlespec):
2580 repo.ui.debug('filtering %s because not a stream clone\n' %
2588 repo.ui.debug('filtering %s because not a stream clone\n' %
2581 entry['URL'])
2589 entry['URL'])
2582 continue
2590 continue
2583
2591
2584 except error.InvalidBundleSpecification as e:
2592 except error.InvalidBundleSpecification as e:
2585 repo.ui.debug(stringutil.forcebytestr(e) + '\n')
2593 repo.ui.debug(stringutil.forcebytestr(e) + '\n')
2586 continue
2594 continue
2587 except error.UnsupportedBundleSpecification as e:
2595 except error.UnsupportedBundleSpecification as e:
2588 repo.ui.debug('filtering %s because unsupported bundle '
2596 repo.ui.debug('filtering %s because unsupported bundle '
2589 'spec: %s\n' % (
2597 'spec: %s\n' % (
2590 entry['URL'], stringutil.forcebytestr(e)))
2598 entry['URL'], stringutil.forcebytestr(e)))
2591 continue
2599 continue
2592 # If we don't have a spec and requested a stream clone, we don't know
2600 # If we don't have a spec and requested a stream clone, we don't know
2593 # what the entry is so don't attempt to apply it.
2601 # what the entry is so don't attempt to apply it.
2594 elif streamclonerequested:
2602 elif streamclonerequested:
2595 repo.ui.debug('filtering %s because cannot determine if a stream '
2603 repo.ui.debug('filtering %s because cannot determine if a stream '
2596 'clone bundle\n' % entry['URL'])
2604 'clone bundle\n' % entry['URL'])
2597 continue
2605 continue
2598
2606
2599 if 'REQUIRESNI' in entry and not sslutil.hassni:
2607 if 'REQUIRESNI' in entry and not sslutil.hassni:
2600 repo.ui.debug('filtering %s because SNI not supported\n' %
2608 repo.ui.debug('filtering %s because SNI not supported\n' %
2601 entry['URL'])
2609 entry['URL'])
2602 continue
2610 continue
2603
2611
2604 newentries.append(entry)
2612 newentries.append(entry)
2605
2613
2606 return newentries
2614 return newentries
2607
2615
2608 class clonebundleentry(object):
2616 class clonebundleentry(object):
2609 """Represents an item in a clone bundles manifest.
2617 """Represents an item in a clone bundles manifest.
2610
2618
2611 This rich class is needed to support sorting since sorted() in Python 3
2619 This rich class is needed to support sorting since sorted() in Python 3
2612 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
2620 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
2613 won't work.
2621 won't work.
2614 """
2622 """
2615
2623
2616 def __init__(self, value, prefers):
2624 def __init__(self, value, prefers):
2617 self.value = value
2625 self.value = value
2618 self.prefers = prefers
2626 self.prefers = prefers
2619
2627
2620 def _cmp(self, other):
2628 def _cmp(self, other):
2621 for prefkey, prefvalue in self.prefers:
2629 for prefkey, prefvalue in self.prefers:
2622 avalue = self.value.get(prefkey)
2630 avalue = self.value.get(prefkey)
2623 bvalue = other.value.get(prefkey)
2631 bvalue = other.value.get(prefkey)
2624
2632
2625 # Special case for b missing attribute and a matches exactly.
2633 # Special case for b missing attribute and a matches exactly.
2626 if avalue is not None and bvalue is None and avalue == prefvalue:
2634 if avalue is not None and bvalue is None and avalue == prefvalue:
2627 return -1
2635 return -1
2628
2636
2629 # Special case for a missing attribute and b matches exactly.
2637 # Special case for a missing attribute and b matches exactly.
2630 if bvalue is not None and avalue is None and bvalue == prefvalue:
2638 if bvalue is not None and avalue is None and bvalue == prefvalue:
2631 return 1
2639 return 1
2632
2640
2633 # We can't compare unless attribute present on both.
2641 # We can't compare unless attribute present on both.
2634 if avalue is None or bvalue is None:
2642 if avalue is None or bvalue is None:
2635 continue
2643 continue
2636
2644
2637 # Same values should fall back to next attribute.
2645 # Same values should fall back to next attribute.
2638 if avalue == bvalue:
2646 if avalue == bvalue:
2639 continue
2647 continue
2640
2648
2641 # Exact matches come first.
2649 # Exact matches come first.
2642 if avalue == prefvalue:
2650 if avalue == prefvalue:
2643 return -1
2651 return -1
2644 if bvalue == prefvalue:
2652 if bvalue == prefvalue:
2645 return 1
2653 return 1
2646
2654
2647 # Fall back to next attribute.
2655 # Fall back to next attribute.
2648 continue
2656 continue
2649
2657
2650 # If we got here we couldn't sort by attributes and prefers. Fall
2658 # If we got here we couldn't sort by attributes and prefers. Fall
2651 # back to index order.
2659 # back to index order.
2652 return 0
2660 return 0
2653
2661
2654 def __lt__(self, other):
2662 def __lt__(self, other):
2655 return self._cmp(other) < 0
2663 return self._cmp(other) < 0
2656
2664
2657 def __gt__(self, other):
2665 def __gt__(self, other):
2658 return self._cmp(other) > 0
2666 return self._cmp(other) > 0
2659
2667
2660 def __eq__(self, other):
2668 def __eq__(self, other):
2661 return self._cmp(other) == 0
2669 return self._cmp(other) == 0
2662
2670
2663 def __le__(self, other):
2671 def __le__(self, other):
2664 return self._cmp(other) <= 0
2672 return self._cmp(other) <= 0
2665
2673
2666 def __ge__(self, other):
2674 def __ge__(self, other):
2667 return self._cmp(other) >= 0
2675 return self._cmp(other) >= 0
2668
2676
2669 def __ne__(self, other):
2677 def __ne__(self, other):
2670 return self._cmp(other) != 0
2678 return self._cmp(other) != 0
2671
2679
2672 def sortclonebundleentries(ui, entries):
2680 def sortclonebundleentries(ui, entries):
2673 prefers = ui.configlist('ui', 'clonebundleprefers')
2681 prefers = ui.configlist('ui', 'clonebundleprefers')
2674 if not prefers:
2682 if not prefers:
2675 return list(entries)
2683 return list(entries)
2676
2684
2677 prefers = [p.split('=', 1) for p in prefers]
2685 prefers = [p.split('=', 1) for p in prefers]
2678
2686
2679 items = sorted(clonebundleentry(v, prefers) for v in entries)
2687 items = sorted(clonebundleentry(v, prefers) for v in entries)
2680 return [i.value for i in items]
2688 return [i.value for i in items]
2681
2689
2682 def trypullbundlefromurl(ui, repo, url):
2690 def trypullbundlefromurl(ui, repo, url):
2683 """Attempt to apply a bundle from a URL."""
2691 """Attempt to apply a bundle from a URL."""
2684 with repo.lock(), repo.transaction('bundleurl') as tr:
2692 with repo.lock(), repo.transaction('bundleurl') as tr:
2685 try:
2693 try:
2686 fh = urlmod.open(ui, url)
2694 fh = urlmod.open(ui, url)
2687 cg = readbundle(ui, fh, 'stream')
2695 cg = readbundle(ui, fh, 'stream')
2688
2696
2689 if isinstance(cg, streamclone.streamcloneapplier):
2697 if isinstance(cg, streamclone.streamcloneapplier):
2690 cg.apply(repo)
2698 cg.apply(repo)
2691 else:
2699 else:
2692 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2700 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2693 return True
2701 return True
2694 except urlerr.httperror as e:
2702 except urlerr.httperror as e:
2695 ui.warn(_('HTTP error fetching bundle: %s\n') %
2703 ui.warn(_('HTTP error fetching bundle: %s\n') %
2696 stringutil.forcebytestr(e))
2704 stringutil.forcebytestr(e))
2697 except urlerr.urlerror as e:
2705 except urlerr.urlerror as e:
2698 ui.warn(_('error fetching bundle: %s\n') %
2706 ui.warn(_('error fetching bundle: %s\n') %
2699 stringutil.forcebytestr(e.reason))
2707 stringutil.forcebytestr(e.reason))
2700
2708
2701 return False
2709 return False
@@ -1,1382 +1,1352 b''
1 #testcases b2-pushkey b2-binary
1 #testcases b2-pushkey b2-binary
2
2
3 #if b2-pushkey
3 #if b2-pushkey
4 $ cat << EOF >> $HGRCPATH
4 $ cat << EOF >> $HGRCPATH
5 > [devel]
5 > [devel]
6 > legacy.exchange=bookmarks
6 > legacy.exchange=bookmarks
7 > EOF
7 > EOF
8 #endif
8 #endif
9
9
10 #require serve
10 #require serve
11
11
12 $ cat << EOF >> $HGRCPATH
12 $ cat << EOF >> $HGRCPATH
13 > [ui]
13 > [ui]
14 > logtemplate={rev}:{node|short} {desc|firstline}
14 > logtemplate={rev}:{node|short} {desc|firstline}
15 > [phases]
15 > [phases]
16 > publish=False
16 > publish=False
17 > [experimental]
17 > [experimental]
18 > evolution.createmarkers=True
18 > evolution.createmarkers=True
19 > evolution.exchange=True
19 > evolution.exchange=True
20 > EOF
20 > EOF
21
21
22 $ cat > $TESTTMP/hook.sh <<'EOF'
22 $ cat > $TESTTMP/hook.sh <<'EOF'
23 > echo "test-hook-bookmark: $HG_BOOKMARK: $HG_OLDNODE -> $HG_NODE"
23 > echo "test-hook-bookmark: $HG_BOOKMARK: $HG_OLDNODE -> $HG_NODE"
24 > EOF
24 > EOF
25 $ TESTHOOK="hooks.txnclose-bookmark.test=sh $TESTTMP/hook.sh"
25 $ TESTHOOK="hooks.txnclose-bookmark.test=sh $TESTTMP/hook.sh"
26
26
27 initialize
27 initialize
28
28
29 $ hg init a
29 $ hg init a
30 $ cd a
30 $ cd a
31 $ echo 'test' > test
31 $ echo 'test' > test
32 $ hg commit -Am'test'
32 $ hg commit -Am'test'
33 adding test
33 adding test
34
34
35 set bookmarks
35 set bookmarks
36
36
37 $ hg bookmark X
37 $ hg bookmark X
38 $ hg bookmark Y
38 $ hg bookmark Y
39 $ hg bookmark Z
39 $ hg bookmark Z
40
40
41 import bookmark by name
41 import bookmark by name
42
42
43 $ hg init ../b
43 $ hg init ../b
44 $ cd ../b
44 $ cd ../b
45 $ hg book Y
45 $ hg book Y
46 $ hg book
46 $ hg book
47 * Y -1:000000000000
47 * Y -1:000000000000
48 $ hg pull ../a --config "$TESTHOOK"
48 $ hg pull ../a --config "$TESTHOOK"
49 pulling from ../a
49 pulling from ../a
50 requesting all changes
50 requesting all changes
51 adding changesets
51 adding changesets
52 adding manifests
52 adding manifests
53 adding file changes
53 adding file changes
54 added 1 changesets with 1 changes to 1 files
54 added 1 changesets with 1 changes to 1 files
55 adding remote bookmark X
55 adding remote bookmark X
56 updating bookmark Y
56 updating bookmark Y
57 adding remote bookmark Z
57 adding remote bookmark Z
58 new changesets 4e3505fd9583 (1 drafts)
58 new changesets 4e3505fd9583 (1 drafts)
59 test-hook-bookmark: X: -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
59 test-hook-bookmark: X: -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
60 test-hook-bookmark: Y: 0000000000000000000000000000000000000000 -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
60 test-hook-bookmark: Y: 0000000000000000000000000000000000000000 -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
61 test-hook-bookmark: Z: -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
61 test-hook-bookmark: Z: -> 4e3505fd95835d721066b76e75dbb8cc554d7f77
62 (run 'hg update' to get a working copy)
62 (run 'hg update' to get a working copy)
63 $ hg bookmarks
63 $ hg bookmarks
64 X 0:4e3505fd9583
64 X 0:4e3505fd9583
65 * Y 0:4e3505fd9583
65 * Y 0:4e3505fd9583
66 Z 0:4e3505fd9583
66 Z 0:4e3505fd9583
67 $ hg debugpushkey ../a namespaces
67 $ hg debugpushkey ../a namespaces
68 bookmarks
68 bookmarks
69 namespaces
69 namespaces
70 obsolete
70 obsolete
71 phases
71 phases
72 $ hg debugpushkey ../a bookmarks
72 $ hg debugpushkey ../a bookmarks
73 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
73 X 4e3505fd95835d721066b76e75dbb8cc554d7f77
74 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
74 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77
75 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
75 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77
76
76
77 delete the bookmark to re-pull it
77 delete the bookmark to re-pull it
78
78
79 $ hg book -d X
79 $ hg book -d X
80 $ hg pull -B X ../a
80 $ hg pull -B X ../a
81 pulling from ../a
81 pulling from ../a
82 no changes found
82 no changes found
83 adding remote bookmark X
83 adding remote bookmark X
84
84
85 finally no-op pull
85 finally no-op pull
86
86
87 $ hg pull -B X ../a
87 $ hg pull -B X ../a
88 pulling from ../a
88 pulling from ../a
89 no changes found
89 no changes found
90 $ hg bookmark
90 $ hg bookmark
91 X 0:4e3505fd9583
91 X 0:4e3505fd9583
92 * Y 0:4e3505fd9583
92 * Y 0:4e3505fd9583
93 Z 0:4e3505fd9583
93 Z 0:4e3505fd9583
94
94
95 export bookmark by name
95 export bookmark by name
96
96
97 $ hg bookmark W
97 $ hg bookmark W
98 $ hg bookmark foo
98 $ hg bookmark foo
99 $ hg bookmark foobar
99 $ hg bookmark foobar
100 $ hg push -B W ../a
100 $ hg push -B W ../a
101 pushing to ../a
101 pushing to ../a
102 searching for changes
102 searching for changes
103 no changes found
103 no changes found
104 exporting bookmark W
104 exporting bookmark W
105 [1]
105 [1]
106 $ hg -R ../a bookmarks
106 $ hg -R ../a bookmarks
107 W -1:000000000000
107 W -1:000000000000
108 X 0:4e3505fd9583
108 X 0:4e3505fd9583
109 Y 0:4e3505fd9583
109 Y 0:4e3505fd9583
110 * Z 0:4e3505fd9583
110 * Z 0:4e3505fd9583
111
111
112 delete a remote bookmark
112 delete a remote bookmark
113
113
114 $ hg book -d W
114 $ hg book -d W
115
115
116 #if b2-pushkey
116 #if b2-pushkey
117
117
118 $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
118 $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
119 pushing to ../a
119 pushing to ../a
120 query 1; heads
120 query 1; heads
121 searching for changes
121 searching for changes
122 all remote heads known locally
122 all remote heads known locally
123 listing keys for "phases"
123 listing keys for "phases"
124 checking for updated bookmarks
124 checking for updated bookmarks
125 listing keys for "bookmarks"
125 listing keys for "bookmarks"
126 no changes found
126 no changes found
127 bundle2-output-bundle: "HG20", 4 parts total
127 bundle2-output-bundle: "HG20", 4 parts total
128 bundle2-output: start emission of HG20 stream
128 bundle2-output: start emission of HG20 stream
129 bundle2-output: bundle parameter:
129 bundle2-output: bundle parameter:
130 bundle2-output: start of parts
130 bundle2-output: start of parts
131 bundle2-output: bundle part: "replycaps"
131 bundle2-output: bundle part: "replycaps"
132 bundle2-output-part: "replycaps" 222 bytes payload
132 bundle2-output-part: "replycaps" 222 bytes payload
133 bundle2-output: part 0: "REPLYCAPS"
133 bundle2-output: part 0: "REPLYCAPS"
134 bundle2-output: header chunk size: 16
134 bundle2-output: header chunk size: 16
135 bundle2-output: payload chunk size: 222
135 bundle2-output: payload chunk size: 222
136 bundle2-output: closing payload chunk
136 bundle2-output: closing payload chunk
137 bundle2-output: bundle part: "check:bookmarks"
137 bundle2-output: bundle part: "check:bookmarks"
138 bundle2-output-part: "check:bookmarks" 23 bytes payload
138 bundle2-output-part: "check:bookmarks" 23 bytes payload
139 bundle2-output: part 1: "CHECK:BOOKMARKS"
139 bundle2-output: part 1: "CHECK:BOOKMARKS"
140 bundle2-output: header chunk size: 22
140 bundle2-output: header chunk size: 22
141 bundle2-output: payload chunk size: 23
141 bundle2-output: payload chunk size: 23
142 bundle2-output: closing payload chunk
142 bundle2-output: closing payload chunk
143 bundle2-output: bundle part: "check:phases"
143 bundle2-output: bundle part: "check:phases"
144 bundle2-output-part: "check:phases" 24 bytes payload
144 bundle2-output-part: "check:phases" 24 bytes payload
145 bundle2-output: part 2: "CHECK:PHASES"
145 bundle2-output: part 2: "CHECK:PHASES"
146 bundle2-output: header chunk size: 19
146 bundle2-output: header chunk size: 19
147 bundle2-output: payload chunk size: 24
147 bundle2-output: payload chunk size: 24
148 bundle2-output: closing payload chunk
148 bundle2-output: closing payload chunk
149 bundle2-output: bundle part: "pushkey"
149 bundle2-output: bundle part: "pushkey"
150 bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
150 bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload
151 bundle2-output: part 3: "PUSHKEY"
151 bundle2-output: part 3: "PUSHKEY"
152 bundle2-output: header chunk size: 90
152 bundle2-output: header chunk size: 90
153 bundle2-output: closing payload chunk
153 bundle2-output: closing payload chunk
154 bundle2-output: end of bundle
154 bundle2-output: end of bundle
155 bundle2-input: start processing of HG20 stream
155 bundle2-input: start processing of HG20 stream
156 bundle2-input: reading bundle2 stream parameters
156 bundle2-input: reading bundle2 stream parameters
157 bundle2-input-bundle: with-transaction
157 bundle2-input-bundle: with-transaction
158 bundle2-input: start extraction of bundle2 parts
158 bundle2-input: start extraction of bundle2 parts
159 bundle2-input: part header size: 16
159 bundle2-input: part header size: 16
160 bundle2-input: part type: "REPLYCAPS"
160 bundle2-input: part type: "REPLYCAPS"
161 bundle2-input: part id: "0"
161 bundle2-input: part id: "0"
162 bundle2-input: part parameters: 0
162 bundle2-input: part parameters: 0
163 bundle2-input: found a handler for part replycaps
163 bundle2-input: found a handler for part replycaps
164 bundle2-input-part: "replycaps" supported
164 bundle2-input-part: "replycaps" supported
165 bundle2-input: payload chunk size: 222
165 bundle2-input: payload chunk size: 222
166 bundle2-input: payload chunk size: 0
166 bundle2-input: payload chunk size: 0
167 bundle2-input-part: total payload size 222
167 bundle2-input-part: total payload size 222
168 bundle2-input: part header size: 22
168 bundle2-input: part header size: 22
169 bundle2-input: part type: "CHECK:BOOKMARKS"
169 bundle2-input: part type: "CHECK:BOOKMARKS"
170 bundle2-input: part id: "1"
170 bundle2-input: part id: "1"
171 bundle2-input: part parameters: 0
171 bundle2-input: part parameters: 0
172 bundle2-input: found a handler for part check:bookmarks
172 bundle2-input: found a handler for part check:bookmarks
173 bundle2-input-part: "check:bookmarks" supported
173 bundle2-input-part: "check:bookmarks" supported
174 bundle2-input: payload chunk size: 23
174 bundle2-input: payload chunk size: 23
175 bundle2-input: payload chunk size: 0
175 bundle2-input: payload chunk size: 0
176 bundle2-input-part: total payload size 23
176 bundle2-input-part: total payload size 23
177 bundle2-input: part header size: 19
177 bundle2-input: part header size: 19
178 bundle2-input: part type: "CHECK:PHASES"
178 bundle2-input: part type: "CHECK:PHASES"
179 bundle2-input: part id: "2"
179 bundle2-input: part id: "2"
180 bundle2-input: part parameters: 0
180 bundle2-input: part parameters: 0
181 bundle2-input: found a handler for part check:phases
181 bundle2-input: found a handler for part check:phases
182 bundle2-input-part: "check:phases" supported
182 bundle2-input-part: "check:phases" supported
183 bundle2-input: payload chunk size: 24
183 bundle2-input: payload chunk size: 24
184 bundle2-input: payload chunk size: 0
184 bundle2-input: payload chunk size: 0
185 bundle2-input-part: total payload size 24
185 bundle2-input-part: total payload size 24
186 bundle2-input: part header size: 90
186 bundle2-input: part header size: 90
187 bundle2-input: part type: "PUSHKEY"
187 bundle2-input: part type: "PUSHKEY"
188 bundle2-input: part id: "3"
188 bundle2-input: part id: "3"
189 bundle2-input: part parameters: 4
189 bundle2-input: part parameters: 4
190 bundle2-input: found a handler for part pushkey
190 bundle2-input: found a handler for part pushkey
191 bundle2-input-part: "pushkey" (params: 4 mandatory) supported
191 bundle2-input-part: "pushkey" (params: 4 mandatory) supported
192 pushing key for "bookmarks:W"
192 pushing key for "bookmarks:W"
193 bundle2-input: payload chunk size: 0
193 bundle2-input: payload chunk size: 0
194 bundle2-input: part header size: 0
194 bundle2-input: part header size: 0
195 bundle2-input: end of bundle2 stream
195 bundle2-input: end of bundle2 stream
196 bundle2-input-bundle: 3 parts total
196 bundle2-input-bundle: 3 parts total
197 running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
197 running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
198 test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
198 test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
199 bundle2-output-bundle: "HG20", 1 parts total
199 bundle2-output-bundle: "HG20", 1 parts total
200 bundle2-output: start emission of HG20 stream
200 bundle2-output: start emission of HG20 stream
201 bundle2-output: bundle parameter:
201 bundle2-output: bundle parameter:
202 bundle2-output: start of parts
202 bundle2-output: start of parts
203 bundle2-output: bundle part: "reply:pushkey"
203 bundle2-output: bundle part: "reply:pushkey"
204 bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
204 bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload
205 bundle2-output: part 0: "REPLY:PUSHKEY"
205 bundle2-output: part 0: "REPLY:PUSHKEY"
206 bundle2-output: header chunk size: 43
206 bundle2-output: header chunk size: 43
207 bundle2-output: closing payload chunk
207 bundle2-output: closing payload chunk
208 bundle2-output: end of bundle
208 bundle2-output: end of bundle
209 bundle2-input: start processing of HG20 stream
209 bundle2-input: start processing of HG20 stream
210 bundle2-input: reading bundle2 stream parameters
210 bundle2-input: reading bundle2 stream parameters
211 bundle2-input-bundle: no-transaction
211 bundle2-input-bundle: no-transaction
212 bundle2-input: start extraction of bundle2 parts
212 bundle2-input: start extraction of bundle2 parts
213 bundle2-input: part header size: 43
213 bundle2-input: part header size: 43
214 bundle2-input: part type: "REPLY:PUSHKEY"
214 bundle2-input: part type: "REPLY:PUSHKEY"
215 bundle2-input: part id: "0"
215 bundle2-input: part id: "0"
216 bundle2-input: part parameters: 2
216 bundle2-input: part parameters: 2
217 bundle2-input: found a handler for part reply:pushkey
217 bundle2-input: found a handler for part reply:pushkey
218 bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
218 bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported
219 bundle2-input: payload chunk size: 0
219 bundle2-input: payload chunk size: 0
220 bundle2-input: part header size: 0
220 bundle2-input: part header size: 0
221 bundle2-input: end of bundle2 stream
221 bundle2-input: end of bundle2 stream
222 bundle2-input-bundle: 0 parts total
222 bundle2-input-bundle: 0 parts total
223 deleting remote bookmark W
223 deleting remote bookmark W
224 listing keys for "phases"
224 listing keys for "phases"
225 [1]
225 [1]
226
226
227 #endif
227 #endif
228 #if b2-binary
228 #if b2-binary
229
229
230 $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
230 $ hg push -B W ../a --config "$TESTHOOK" --debug --config devel.bundle2.debug=yes
231 pushing to ../a
231 pushing to ../a
232 query 1; heads
232 query 1; heads
233 searching for changes
233 searching for changes
234 all remote heads known locally
234 all remote heads known locally
235 listing keys for "phases"
235 listing keys for "phases"
236 checking for updated bookmarks
236 checking for updated bookmarks
237 listing keys for "bookmarks"
237 listing keys for "bookmarks"
238 no changes found
238 no changes found
239 bundle2-output-bundle: "HG20", 4 parts total
239 bundle2-output-bundle: "HG20", 4 parts total
240 bundle2-output: start emission of HG20 stream
240 bundle2-output: start emission of HG20 stream
241 bundle2-output: bundle parameter:
241 bundle2-output: bundle parameter:
242 bundle2-output: start of parts
242 bundle2-output: start of parts
243 bundle2-output: bundle part: "replycaps"
243 bundle2-output: bundle part: "replycaps"
244 bundle2-output-part: "replycaps" 222 bytes payload
244 bundle2-output-part: "replycaps" 222 bytes payload
245 bundle2-output: part 0: "REPLYCAPS"
245 bundle2-output: part 0: "REPLYCAPS"
246 bundle2-output: header chunk size: 16
246 bundle2-output: header chunk size: 16
247 bundle2-output: payload chunk size: 222
247 bundle2-output: payload chunk size: 222
248 bundle2-output: closing payload chunk
248 bundle2-output: closing payload chunk
249 bundle2-output: bundle part: "check:bookmarks"
249 bundle2-output: bundle part: "check:bookmarks"
250 bundle2-output-part: "check:bookmarks" 23 bytes payload
250 bundle2-output-part: "check:bookmarks" 23 bytes payload
251 bundle2-output: part 1: "CHECK:BOOKMARKS"
251 bundle2-output: part 1: "CHECK:BOOKMARKS"
252 bundle2-output: header chunk size: 22
252 bundle2-output: header chunk size: 22
253 bundle2-output: payload chunk size: 23
253 bundle2-output: payload chunk size: 23
254 bundle2-output: closing payload chunk
254 bundle2-output: closing payload chunk
255 bundle2-output: bundle part: "check:phases"
255 bundle2-output: bundle part: "check:phases"
256 bundle2-output-part: "check:phases" 24 bytes payload
256 bundle2-output-part: "check:phases" 24 bytes payload
257 bundle2-output: part 2: "CHECK:PHASES"
257 bundle2-output: part 2: "CHECK:PHASES"
258 bundle2-output: header chunk size: 19
258 bundle2-output: header chunk size: 19
259 bundle2-output: payload chunk size: 24
259 bundle2-output: payload chunk size: 24
260 bundle2-output: closing payload chunk
260 bundle2-output: closing payload chunk
261 bundle2-output: bundle part: "bookmarks"
261 bundle2-output: bundle part: "bookmarks"
262 bundle2-output-part: "bookmarks" 23 bytes payload
262 bundle2-output-part: "bookmarks" 23 bytes payload
263 bundle2-output: part 3: "BOOKMARKS"
263 bundle2-output: part 3: "BOOKMARKS"
264 bundle2-output: header chunk size: 16
264 bundle2-output: header chunk size: 16
265 bundle2-output: payload chunk size: 23
265 bundle2-output: payload chunk size: 23
266 bundle2-output: closing payload chunk
266 bundle2-output: closing payload chunk
267 bundle2-output: end of bundle
267 bundle2-output: end of bundle
268 bundle2-input: start processing of HG20 stream
268 bundle2-input: start processing of HG20 stream
269 bundle2-input: reading bundle2 stream parameters
269 bundle2-input: reading bundle2 stream parameters
270 bundle2-input-bundle: with-transaction
270 bundle2-input-bundle: with-transaction
271 bundle2-input: start extraction of bundle2 parts
271 bundle2-input: start extraction of bundle2 parts
272 bundle2-input: part header size: 16
272 bundle2-input: part header size: 16
273 bundle2-input: part type: "REPLYCAPS"
273 bundle2-input: part type: "REPLYCAPS"
274 bundle2-input: part id: "0"
274 bundle2-input: part id: "0"
275 bundle2-input: part parameters: 0
275 bundle2-input: part parameters: 0
276 bundle2-input: found a handler for part replycaps
276 bundle2-input: found a handler for part replycaps
277 bundle2-input-part: "replycaps" supported
277 bundle2-input-part: "replycaps" supported
278 bundle2-input: payload chunk size: 222
278 bundle2-input: payload chunk size: 222
279 bundle2-input: payload chunk size: 0
279 bundle2-input: payload chunk size: 0
280 bundle2-input-part: total payload size 222
280 bundle2-input-part: total payload size 222
281 bundle2-input: part header size: 22
281 bundle2-input: part header size: 22
282 bundle2-input: part type: "CHECK:BOOKMARKS"
282 bundle2-input: part type: "CHECK:BOOKMARKS"
283 bundle2-input: part id: "1"
283 bundle2-input: part id: "1"
284 bundle2-input: part parameters: 0
284 bundle2-input: part parameters: 0
285 bundle2-input: found a handler for part check:bookmarks
285 bundle2-input: found a handler for part check:bookmarks
286 bundle2-input-part: "check:bookmarks" supported
286 bundle2-input-part: "check:bookmarks" supported
287 bundle2-input: payload chunk size: 23
287 bundle2-input: payload chunk size: 23
288 bundle2-input: payload chunk size: 0
288 bundle2-input: payload chunk size: 0
289 bundle2-input-part: total payload size 23
289 bundle2-input-part: total payload size 23
290 bundle2-input: part header size: 19
290 bundle2-input: part header size: 19
291 bundle2-input: part type: "CHECK:PHASES"
291 bundle2-input: part type: "CHECK:PHASES"
292 bundle2-input: part id: "2"
292 bundle2-input: part id: "2"
293 bundle2-input: part parameters: 0
293 bundle2-input: part parameters: 0
294 bundle2-input: found a handler for part check:phases
294 bundle2-input: found a handler for part check:phases
295 bundle2-input-part: "check:phases" supported
295 bundle2-input-part: "check:phases" supported
296 bundle2-input: payload chunk size: 24
296 bundle2-input: payload chunk size: 24
297 bundle2-input: payload chunk size: 0
297 bundle2-input: payload chunk size: 0
298 bundle2-input-part: total payload size 24
298 bundle2-input-part: total payload size 24
299 bundle2-input: part header size: 16
299 bundle2-input: part header size: 16
300 bundle2-input: part type: "BOOKMARKS"
300 bundle2-input: part type: "BOOKMARKS"
301 bundle2-input: part id: "3"
301 bundle2-input: part id: "3"
302 bundle2-input: part parameters: 0
302 bundle2-input: part parameters: 0
303 bundle2-input: found a handler for part bookmarks
303 bundle2-input: found a handler for part bookmarks
304 bundle2-input-part: "bookmarks" supported
304 bundle2-input-part: "bookmarks" supported
305 bundle2-input: payload chunk size: 23
305 bundle2-input: payload chunk size: 23
306 bundle2-input: payload chunk size: 0
306 bundle2-input: payload chunk size: 0
307 bundle2-input-part: total payload size 23
307 bundle2-input-part: total payload size 23
308 bundle2-input: part header size: 0
308 bundle2-input: part header size: 0
309 bundle2-input: end of bundle2 stream
309 bundle2-input: end of bundle2 stream
310 bundle2-input-bundle: 3 parts total
310 bundle2-input-bundle: 3 parts total
311 running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
311 running hook txnclose-bookmark.test: sh $TESTTMP/hook.sh
312 test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
312 test-hook-bookmark: W: 0000000000000000000000000000000000000000 ->
313 bundle2-output-bundle: "HG20", 0 parts total
313 bundle2-output-bundle: "HG20", 0 parts total
314 bundle2-output: start emission of HG20 stream
314 bundle2-output: start emission of HG20 stream
315 bundle2-output: bundle parameter:
315 bundle2-output: bundle parameter:
316 bundle2-output: start of parts
316 bundle2-output: start of parts
317 bundle2-output: end of bundle
317 bundle2-output: end of bundle
318 bundle2-input: start processing of HG20 stream
318 bundle2-input: start processing of HG20 stream
319 bundle2-input: reading bundle2 stream parameters
319 bundle2-input: reading bundle2 stream parameters
320 bundle2-input-bundle: no-transaction
320 bundle2-input-bundle: no-transaction
321 bundle2-input: start extraction of bundle2 parts
321 bundle2-input: start extraction of bundle2 parts
322 bundle2-input: part header size: 0
322 bundle2-input: part header size: 0
323 bundle2-input: end of bundle2 stream
323 bundle2-input: end of bundle2 stream
324 bundle2-input-bundle: 0 parts total
324 bundle2-input-bundle: 0 parts total
325 deleting remote bookmark W
325 deleting remote bookmark W
326 listing keys for "phases"
326 listing keys for "phases"
327 [1]
327 [1]
328
328
329 #endif
329 #endif
330
330
331 export the active bookmark
331 export the active bookmark
332
332
333 $ hg bookmark V
333 $ hg bookmark V
334 $ hg push -B . ../a
334 $ hg push -B . ../a
335 pushing to ../a
335 pushing to ../a
336 searching for changes
336 searching for changes
337 no changes found
337 no changes found
338 exporting bookmark V
338 exporting bookmark V
339 [1]
339 [1]
340
340
341 exporting the active bookmark with 'push -B .'
341 exporting the active bookmark with 'push -B .'
342 demand that one of the bookmarks is activated
342 demand that one of the bookmarks is activated
343
343
344 $ hg update -r default
344 $ hg update -r default
345 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
345 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
346 (leaving bookmark V)
346 (leaving bookmark V)
347 $ hg push -B . ../a
347 $ hg push -B . ../a
348 abort: no active bookmark!
348 abort: no active bookmark!
349 [255]
349 [255]
350 $ hg update -r V
350 $ hg update -r V
351 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
351 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
352 (activating bookmark V)
352 (activating bookmark V)
353
353
354 delete the bookmark
354 delete the bookmark
355
355
356 $ hg book -d V
356 $ hg book -d V
357 $ hg push -B V ../a
357 $ hg push -B V ../a
358 pushing to ../a
358 pushing to ../a
359 searching for changes
359 searching for changes
360 no changes found
360 no changes found
361 deleting remote bookmark V
361 deleting remote bookmark V
362 [1]
362 [1]
363 $ hg up foobar
363 $ hg up foobar
364 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
364 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
365 (activating bookmark foobar)
365 (activating bookmark foobar)
366
366
367 push/pull name that doesn't exist
367 push/pull name that doesn't exist
368
368
369 $ hg push -B badname ../a
369 $ hg push -B badname ../a
370 pushing to ../a
370 pushing to ../a
371 searching for changes
371 searching for changes
372 bookmark badname does not exist on the local or remote repository!
372 bookmark badname does not exist on the local or remote repository!
373 no changes found
373 no changes found
374 [2]
374 [2]
375 $ hg pull -B anotherbadname ../a
375 $ hg pull -B anotherbadname ../a
376 pulling from ../a
376 pulling from ../a
377 abort: remote bookmark anotherbadname not found!
377 abort: remote bookmark anotherbadname not found!
378 [255]
378 [255]
379
379
380 divergent bookmarks
380 divergent bookmarks
381
381
382 $ cd ../a
382 $ cd ../a
383 $ echo c1 > f1
383 $ echo c1 > f1
384 $ hg ci -Am1
384 $ hg ci -Am1
385 adding f1
385 adding f1
386 $ hg book -f @
386 $ hg book -f @
387 $ hg book -f X
387 $ hg book -f X
388 $ hg book
388 $ hg book
389 @ 1:0d2164f0ce0d
389 @ 1:0d2164f0ce0d
390 * X 1:0d2164f0ce0d
390 * X 1:0d2164f0ce0d
391 Y 0:4e3505fd9583
391 Y 0:4e3505fd9583
392 Z 1:0d2164f0ce0d
392 Z 1:0d2164f0ce0d
393
393
394 $ cd ../b
394 $ cd ../b
395 $ hg up
395 $ hg up
396 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
396 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
397 updating bookmark foobar
397 updating bookmark foobar
398 $ echo c2 > f2
398 $ echo c2 > f2
399 $ hg ci -Am2
399 $ hg ci -Am2
400 adding f2
400 adding f2
401 $ hg book -if @
401 $ hg book -if @
402 $ hg book -if X
402 $ hg book -if X
403 $ hg book
403 $ hg book
404 @ 1:9b140be10808
404 @ 1:9b140be10808
405 X 1:9b140be10808
405 X 1:9b140be10808
406 Y 0:4e3505fd9583
406 Y 0:4e3505fd9583
407 Z 0:4e3505fd9583
407 Z 0:4e3505fd9583
408 foo -1:000000000000
408 foo -1:000000000000
409 * foobar 1:9b140be10808
409 * foobar 1:9b140be10808
410
410
411 $ hg pull --config paths.foo=../a foo --config "$TESTHOOK"
411 $ hg pull --config paths.foo=../a foo --config "$TESTHOOK"
412 pulling from $TESTTMP/a
412 pulling from $TESTTMP/a
413 searching for changes
413 searching for changes
414 adding changesets
414 adding changesets
415 adding manifests
415 adding manifests
416 adding file changes
416 adding file changes
417 added 1 changesets with 1 changes to 1 files (+1 heads)
417 added 1 changesets with 1 changes to 1 files (+1 heads)
418 divergent bookmark @ stored as @foo
418 divergent bookmark @ stored as @foo
419 divergent bookmark X stored as X@foo
419 divergent bookmark X stored as X@foo
420 updating bookmark Z
420 updating bookmark Z
421 new changesets 0d2164f0ce0d (1 drafts)
421 new changesets 0d2164f0ce0d (1 drafts)
422 test-hook-bookmark: @foo: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
422 test-hook-bookmark: @foo: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
423 test-hook-bookmark: X@foo: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
423 test-hook-bookmark: X@foo: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
424 test-hook-bookmark: Z: 4e3505fd95835d721066b76e75dbb8cc554d7f77 -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
424 test-hook-bookmark: Z: 4e3505fd95835d721066b76e75dbb8cc554d7f77 -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
425 (run 'hg heads' to see heads, 'hg merge' to merge)
425 (run 'hg heads' to see heads, 'hg merge' to merge)
426 $ hg book
426 $ hg book
427 @ 1:9b140be10808
427 @ 1:9b140be10808
428 @foo 2:0d2164f0ce0d
428 @foo 2:0d2164f0ce0d
429 X 1:9b140be10808
429 X 1:9b140be10808
430 X@foo 2:0d2164f0ce0d
430 X@foo 2:0d2164f0ce0d
431 Y 0:4e3505fd9583
431 Y 0:4e3505fd9583
432 Z 2:0d2164f0ce0d
432 Z 2:0d2164f0ce0d
433 foo -1:000000000000
433 foo -1:000000000000
434 * foobar 1:9b140be10808
434 * foobar 1:9b140be10808
435
435
436 (test that too many divergence of bookmark)
436 (test that too many divergence of bookmark)
437
437
438 $ "$PYTHON" $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done
438 $ "$PYTHON" $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done
439 $ hg pull ../a
439 $ hg pull ../a
440 pulling from ../a
440 pulling from ../a
441 searching for changes
441 searching for changes
442 no changes found
442 no changes found
443 warning: failed to assign numbered name to divergent bookmark X
443 warning: failed to assign numbered name to divergent bookmark X
444 divergent bookmark @ stored as @1
444 divergent bookmark @ stored as @1
445 $ hg bookmarks | grep '^ X' | grep -v ':000000000000'
445 $ hg bookmarks | grep '^ X' | grep -v ':000000000000'
446 X 1:9b140be10808
446 X 1:9b140be10808
447 X@foo 2:0d2164f0ce0d
447 X@foo 2:0d2164f0ce0d
448
448
449 (test that remotely diverged bookmarks are reused if they aren't changed)
449 (test that remotely diverged bookmarks are reused if they aren't changed)
450
450
451 $ hg bookmarks | grep '^ @'
451 $ hg bookmarks | grep '^ @'
452 @ 1:9b140be10808
452 @ 1:9b140be10808
453 @1 2:0d2164f0ce0d
453 @1 2:0d2164f0ce0d
454 @foo 2:0d2164f0ce0d
454 @foo 2:0d2164f0ce0d
455 $ hg pull ../a
455 $ hg pull ../a
456 pulling from ../a
456 pulling from ../a
457 searching for changes
457 searching for changes
458 no changes found
458 no changes found
459 warning: failed to assign numbered name to divergent bookmark X
459 warning: failed to assign numbered name to divergent bookmark X
460 divergent bookmark @ stored as @1
460 divergent bookmark @ stored as @1
461 $ hg bookmarks | grep '^ @'
461 $ hg bookmarks | grep '^ @'
462 @ 1:9b140be10808
462 @ 1:9b140be10808
463 @1 2:0d2164f0ce0d
463 @1 2:0d2164f0ce0d
464 @foo 2:0d2164f0ce0d
464 @foo 2:0d2164f0ce0d
465
465
466 $ "$PYTHON" $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done
466 $ "$PYTHON" $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done
467 $ hg bookmarks -d "@1"
467 $ hg bookmarks -d "@1"
468
468
469 $ hg push -f ../a
469 $ hg push -f ../a
470 pushing to ../a
470 pushing to ../a
471 searching for changes
471 searching for changes
472 adding changesets
472 adding changesets
473 adding manifests
473 adding manifests
474 adding file changes
474 adding file changes
475 added 1 changesets with 1 changes to 1 files (+1 heads)
475 added 1 changesets with 1 changes to 1 files (+1 heads)
476 $ hg -R ../a book
476 $ hg -R ../a book
477 @ 1:0d2164f0ce0d
477 @ 1:0d2164f0ce0d
478 * X 1:0d2164f0ce0d
478 * X 1:0d2164f0ce0d
479 Y 0:4e3505fd9583
479 Y 0:4e3505fd9583
480 Z 1:0d2164f0ce0d
480 Z 1:0d2164f0ce0d
481
481
482 explicit pull should overwrite the local version (issue4439)
482 explicit pull should overwrite the local version (issue4439)
483
483
484 $ hg update -r X
484 $ hg update -r X
485 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
485 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
486 (activating bookmark X)
486 (activating bookmark X)
487 $ hg pull --config paths.foo=../a foo -B . --config "$TESTHOOK"
487 $ hg pull --config paths.foo=../a foo -B . --config "$TESTHOOK"
488 pulling from $TESTTMP/a
488 pulling from $TESTTMP/a
489 no changes found
489 no changes found
490 divergent bookmark @ stored as @foo
490 divergent bookmark @ stored as @foo
491 importing bookmark X
491 importing bookmark X
492 test-hook-bookmark: @foo: 0d2164f0ce0d8f1d6f94351eba04b794909be66c -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
492 test-hook-bookmark: @foo: 0d2164f0ce0d8f1d6f94351eba04b794909be66c -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
493 test-hook-bookmark: X: 9b140be1080824d768c5a4691a564088eede71f9 -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
493 test-hook-bookmark: X: 9b140be1080824d768c5a4691a564088eede71f9 -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
494
494
495 reinstall state for further testing:
495 reinstall state for further testing:
496
496
497 $ hg book -fr 9b140be10808 X
497 $ hg book -fr 9b140be10808 X
498
498
499 revsets should not ignore divergent bookmarks
499 revsets should not ignore divergent bookmarks
500
500
501 $ hg bookmark -fr 1 Z
501 $ hg bookmark -fr 1 Z
502 $ hg log -r 'bookmark()' --template '{rev}:{node|short} {bookmarks}\n'
502 $ hg log -r 'bookmark()' --template '{rev}:{node|short} {bookmarks}\n'
503 0:4e3505fd9583 Y
503 0:4e3505fd9583 Y
504 1:9b140be10808 @ X Z foobar
504 1:9b140be10808 @ X Z foobar
505 2:0d2164f0ce0d @foo X@foo
505 2:0d2164f0ce0d @foo X@foo
506 $ hg log -r 'bookmark("X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
506 $ hg log -r 'bookmark("X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
507 2:0d2164f0ce0d @foo X@foo
507 2:0d2164f0ce0d @foo X@foo
508 $ hg log -r 'bookmark("re:X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
508 $ hg log -r 'bookmark("re:X@foo")' --template '{rev}:{node|short} {bookmarks}\n'
509 2:0d2164f0ce0d @foo X@foo
509 2:0d2164f0ce0d @foo X@foo
510
510
511 update a remote bookmark from a non-head to a head
511 update a remote bookmark from a non-head to a head
512
512
513 $ hg up -q Y
513 $ hg up -q Y
514 $ echo c3 > f2
514 $ echo c3 > f2
515 $ hg ci -Am3
515 $ hg ci -Am3
516 adding f2
516 adding f2
517 created new head
517 created new head
518 $ hg push ../a --config "$TESTHOOK"
518 $ hg push ../a --config "$TESTHOOK"
519 pushing to ../a
519 pushing to ../a
520 searching for changes
520 searching for changes
521 adding changesets
521 adding changesets
522 adding manifests
522 adding manifests
523 adding file changes
523 adding file changes
524 added 1 changesets with 1 changes to 1 files (+1 heads)
524 added 1 changesets with 1 changes to 1 files (+1 heads)
525 test-hook-bookmark: Y: 4e3505fd95835d721066b76e75dbb8cc554d7f77 -> f6fc62dde3c0771e29704af56ba4d8af77abcc2f
525 test-hook-bookmark: Y: 4e3505fd95835d721066b76e75dbb8cc554d7f77 -> f6fc62dde3c0771e29704af56ba4d8af77abcc2f
526 updating bookmark Y
526 updating bookmark Y
527 $ hg -R ../a book
527 $ hg -R ../a book
528 @ 1:0d2164f0ce0d
528 @ 1:0d2164f0ce0d
529 * X 1:0d2164f0ce0d
529 * X 1:0d2164f0ce0d
530 Y 3:f6fc62dde3c0
530 Y 3:f6fc62dde3c0
531 Z 1:0d2164f0ce0d
531 Z 1:0d2164f0ce0d
532
532
533 update a bookmark in the middle of a client pulling changes
533 update a bookmark in the middle of a client pulling changes
534
534
535 $ cd ..
535 $ cd ..
536 $ hg clone -q a pull-race
536 $ hg clone -q a pull-race
537
537
538 We want to use http because it is stateless and therefore more susceptible to
538 We want to use http because it is stateless and therefore more susceptible to
539 race conditions
539 race conditions
540
540
541 $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log
541 $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log
542 $ cat pull-race.pid >> $DAEMON_PIDS
542 $ cat pull-race.pid >> $DAEMON_PIDS
543
543
544 $ cat <<EOF > $TESTTMP/out_makecommit.sh
544 $ cat <<EOF > $TESTTMP/out_makecommit.sh
545 > #!/bin/sh
545 > #!/bin/sh
546 > hg ci -Am5
546 > hg ci -Am5
547 > echo committed in pull-race
547 > echo committed in pull-race
548 > EOF
548 > EOF
549
549
550 $ hg clone -q http://localhost:$HGPORT/ pull-race2 --config "$TESTHOOK"
550 $ hg clone -q http://localhost:$HGPORT/ pull-race2 --config "$TESTHOOK"
551 test-hook-bookmark: @: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
551 test-hook-bookmark: @: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
552 test-hook-bookmark: X: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
552 test-hook-bookmark: X: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
553 test-hook-bookmark: Y: -> f6fc62dde3c0771e29704af56ba4d8af77abcc2f
553 test-hook-bookmark: Y: -> f6fc62dde3c0771e29704af56ba4d8af77abcc2f
554 test-hook-bookmark: Z: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
554 test-hook-bookmark: Z: -> 0d2164f0ce0d8f1d6f94351eba04b794909be66c
555 $ cd pull-race
555 $ cd pull-race
556 $ hg up -q Y
556 $ hg up -q Y
557 $ echo c4 > f2
557 $ echo c4 > f2
558 $ hg ci -Am4
558 $ hg ci -Am4
559 $ echo c5 > f3
559 $ echo c5 > f3
560 $ cat <<EOF > .hg/hgrc
560 $ cat <<EOF > .hg/hgrc
561 > [hooks]
561 > [hooks]
562 > outgoing.makecommit = sh $TESTTMP/out_makecommit.sh
562 > outgoing.makecommit = sh $TESTTMP/out_makecommit.sh
563 > EOF
563 > EOF
564
564
565 (new config needs a server restart)
565 (new config needs a server restart)
566
566
567 $ cd ..
567 $ cd ..
568 $ killdaemons.py
568 $ killdaemons.py
569 $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log
569 $ hg serve -R pull-race -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log
570 $ cat pull-race.pid >> $DAEMON_PIDS
570 $ cat pull-race.pid >> $DAEMON_PIDS
571 $ cd pull-race2
571 $ cd pull-race2
572 $ hg -R $TESTTMP/pull-race book
572 $ hg -R $TESTTMP/pull-race book
573 @ 1:0d2164f0ce0d
573 @ 1:0d2164f0ce0d
574 X 1:0d2164f0ce0d
574 X 1:0d2164f0ce0d
575 * Y 4:b0a5eff05604
575 * Y 4:b0a5eff05604
576 Z 1:0d2164f0ce0d
576 Z 1:0d2164f0ce0d
577 $ hg pull
577 $ hg pull
578 pulling from http://localhost:$HGPORT/
578 pulling from http://localhost:$HGPORT/
579 searching for changes
579 searching for changes
580 adding changesets
580 adding changesets
581 adding manifests
581 adding manifests
582 adding file changes
582 adding file changes
583 added 1 changesets with 1 changes to 1 files
583 added 1 changesets with 1 changes to 1 files
584 updating bookmark Y
584 updating bookmark Y
585 new changesets b0a5eff05604 (1 drafts)
585 new changesets b0a5eff05604 (1 drafts)
586 (run 'hg update' to get a working copy)
586 (run 'hg update' to get a working copy)
587 $ hg book
587 $ hg book
588 * @ 1:0d2164f0ce0d
588 * @ 1:0d2164f0ce0d
589 X 1:0d2164f0ce0d
589 X 1:0d2164f0ce0d
590 Y 4:b0a5eff05604
590 Y 4:b0a5eff05604
591 Z 1:0d2164f0ce0d
591 Z 1:0d2164f0ce0d
592
592
593 Update a bookmark right after the initial lookup -B (issue4689)
593 Update a bookmark right after the initial lookup -B (issue4689)
594
594
595 $ echo c6 > ../pull-race/f3 # to be committed during the race
595 $ echo c6 > ../pull-race/f3 # to be committed during the race
596 $ cat <<EOF > $TESTTMP/listkeys_makecommit.sh
596 $ cat <<EOF > $TESTTMP/listkeys_makecommit.sh
597 > #!/bin/sh
597 > #!/bin/sh
598 > if hg st | grep -q M; then
598 > if hg st | grep -q M; then
599 > hg commit -m race
599 > hg commit -m race
600 > echo committed in pull-race
600 > echo committed in pull-race
601 > else
601 > else
602 > exit 0
602 > exit 0
603 > fi
603 > fi
604 > EOF
604 > EOF
605 $ cat <<EOF > ../pull-race/.hg/hgrc
605 $ cat <<EOF > ../pull-race/.hg/hgrc
606 > [hooks]
606 > [hooks]
607 > # If anything to commit, commit it right after the first key listing used
607 > # If anything to commit, commit it right after the first key listing used
608 > # during lookup. This makes the commit appear before the actual getbundle
608 > # during lookup. This makes the commit appear before the actual getbundle
609 > # call.
609 > # call.
610 > listkeys.makecommit= sh $TESTTMP/listkeys_makecommit.sh
610 > listkeys.makecommit= sh $TESTTMP/listkeys_makecommit.sh
611 > EOF
611 > EOF
612 $ restart_server() {
612 $ restart_server() {
613 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
613 > "$TESTDIR/killdaemons.py" $DAEMON_PIDS
614 > hg serve -R ../pull-race -p $HGPORT -d --pid-file=../pull-race.pid -E main-error.log
614 > hg serve -R ../pull-race -p $HGPORT -d --pid-file=../pull-race.pid -E main-error.log
615 > cat ../pull-race.pid >> $DAEMON_PIDS
615 > cat ../pull-race.pid >> $DAEMON_PIDS
616 > }
616 > }
617 $ restart_server # new config need server restart
617 $ restart_server # new config need server restart
618 $ hg -R $TESTTMP/pull-race book
618 $ hg -R $TESTTMP/pull-race book
619 @ 1:0d2164f0ce0d
619 @ 1:0d2164f0ce0d
620 X 1:0d2164f0ce0d
620 X 1:0d2164f0ce0d
621 * Y 5:35d1ef0a8d1b
621 * Y 5:35d1ef0a8d1b
622 Z 1:0d2164f0ce0d
622 Z 1:0d2164f0ce0d
623 $ hg update -r Y
623 $ hg update -r Y
624 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
624 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
625 (activating bookmark Y)
625 (activating bookmark Y)
626 $ hg pull -B .
626 $ hg pull -B .
627 pulling from http://localhost:$HGPORT/
627 pulling from http://localhost:$HGPORT/
628 searching for changes
628 searching for changes
629 adding changesets
629 adding changesets
630 adding manifests
630 adding manifests
631 adding file changes
631 adding file changes
632 added 1 changesets with 1 changes to 1 files
632 added 1 changesets with 1 changes to 1 files
633 updating bookmark Y
633 updating bookmark Y
634 new changesets 35d1ef0a8d1b (1 drafts)
634 new changesets 35d1ef0a8d1b (1 drafts)
635 (run 'hg update' to get a working copy)
635 (run 'hg update' to get a working copy)
636 $ hg book
636 $ hg book
637 @ 1:0d2164f0ce0d
637 @ 1:0d2164f0ce0d
638 X 1:0d2164f0ce0d
638 X 1:0d2164f0ce0d
639 * Y 5:35d1ef0a8d1b
639 * Y 5:35d1ef0a8d1b
640 Z 1:0d2164f0ce0d
640 Z 1:0d2164f0ce0d
641
641
642 Update a bookmark right after the initial lookup -r (issue4700)
642 Update a bookmark right after the initial lookup -r (issue4700)
643
643
644 $ echo c7 > ../pull-race/f3 # to be committed during the race
644 $ echo c7 > ../pull-race/f3 # to be committed during the race
645 $ cat <<EOF > ../lookuphook.py
645 $ cat <<EOF > ../lookuphook.py
646 > """small extensions adding a hook after wireprotocol lookup to test race"""
646 > """small extensions adding a hook after wireprotocol lookup to test race"""
647 > import functools
647 > import functools
648 > from mercurial import wireprotov1server, wireprotov2server
648 > from mercurial import wireprotov1server, wireprotov2server
649 >
649 >
650 > def wrappedlookup(orig, repo, *args, **kwargs):
650 > def wrappedlookup(orig, repo, *args, **kwargs):
651 > ret = orig(repo, *args, **kwargs)
651 > ret = orig(repo, *args, **kwargs)
652 > repo.hook(b'lookup')
652 > repo.hook(b'lookup')
653 > return ret
653 > return ret
654 > for table in [wireprotov1server.commands, wireprotov2server.COMMANDS]:
654 > for table in [wireprotov1server.commands, wireprotov2server.COMMANDS]:
655 > table[b'lookup'].func = functools.partial(wrappedlookup, table[b'lookup'].func)
655 > table[b'lookup'].func = functools.partial(wrappedlookup, table[b'lookup'].func)
656 > EOF
656 > EOF
657 $ cat <<EOF > ../pull-race/.hg/hgrc
657 $ cat <<EOF > ../pull-race/.hg/hgrc
658 > [extensions]
658 > [extensions]
659 > lookuphook=$TESTTMP/lookuphook.py
659 > lookuphook=$TESTTMP/lookuphook.py
660 > [hooks]
660 > [hooks]
661 > lookup.makecommit= sh $TESTTMP/listkeys_makecommit.sh
661 > lookup.makecommit= sh $TESTTMP/listkeys_makecommit.sh
662 > EOF
662 > EOF
663 $ restart_server # new config need server restart
663 $ restart_server # new config need server restart
664 $ hg -R $TESTTMP/pull-race book
664 $ hg -R $TESTTMP/pull-race book
665 @ 1:0d2164f0ce0d
665 @ 1:0d2164f0ce0d
666 X 1:0d2164f0ce0d
666 X 1:0d2164f0ce0d
667 * Y 6:0d60821d2197
667 * Y 6:0d60821d2197
668 Z 1:0d2164f0ce0d
668 Z 1:0d2164f0ce0d
669 $ hg pull -r Y
669 $ hg pull -r Y
670 pulling from http://localhost:$HGPORT/
670 pulling from http://localhost:$HGPORT/
671 searching for changes
671 searching for changes
672 adding changesets
672 adding changesets
673 adding manifests
673 adding manifests
674 adding file changes
674 adding file changes
675 added 1 changesets with 1 changes to 1 files
675 added 1 changesets with 1 changes to 1 files
676 updating bookmark Y
676 updating bookmark Y
677 new changesets 0d60821d2197 (1 drafts)
677 new changesets 0d60821d2197 (1 drafts)
678 (run 'hg update' to get a working copy)
678 (run 'hg update' to get a working copy)
679 $ hg book
679 $ hg book
680 @ 1:0d2164f0ce0d
680 @ 1:0d2164f0ce0d
681 X 1:0d2164f0ce0d
681 X 1:0d2164f0ce0d
682 * Y 6:0d60821d2197
682 * Y 6:0d60821d2197
683 Z 1:0d2164f0ce0d
683 Z 1:0d2164f0ce0d
684 $ hg -R $TESTTMP/pull-race book
684 $ hg -R $TESTTMP/pull-race book
685 @ 1:0d2164f0ce0d
685 @ 1:0d2164f0ce0d
686 X 1:0d2164f0ce0d
686 X 1:0d2164f0ce0d
687 * Y 7:714424d9e8b8
687 * Y 7:714424d9e8b8
688 Z 1:0d2164f0ce0d
688 Z 1:0d2164f0ce0d
689
689
690 (done with this section of the test)
690 (done with this section of the test)
691
691
692 $ killdaemons.py
692 $ killdaemons.py
693 $ cd ../b
693 $ cd ../b
694
694
695 diverging a remote bookmark fails
695 diverging a remote bookmark fails
696
696
697 $ hg up -q 4e3505fd9583
697 $ hg up -q 4e3505fd9583
698 $ echo c4 > f2
698 $ echo c4 > f2
699 $ hg ci -Am4
699 $ hg ci -Am4
700 adding f2
700 adding f2
701 created new head
701 created new head
702 $ echo c5 > f2
702 $ echo c5 > f2
703 $ hg ci -Am5
703 $ hg ci -Am5
704 $ hg log -G
704 $ hg log -G
705 @ 5:c922c0139ca0 5
705 @ 5:c922c0139ca0 5
706 |
706 |
707 o 4:4efff6d98829 4
707 o 4:4efff6d98829 4
708 |
708 |
709 | o 3:f6fc62dde3c0 3
709 | o 3:f6fc62dde3c0 3
710 |/
710 |/
711 | o 2:0d2164f0ce0d 1
711 | o 2:0d2164f0ce0d 1
712 |/
712 |/
713 | o 1:9b140be10808 2
713 | o 1:9b140be10808 2
714 |/
714 |/
715 o 0:4e3505fd9583 test
715 o 0:4e3505fd9583 test
716
716
717
717
718 $ hg book -f Y
718 $ hg book -f Y
719
719
720 $ cat <<EOF > ../a/.hg/hgrc
720 $ cat <<EOF > ../a/.hg/hgrc
721 > [web]
721 > [web]
722 > push_ssl = false
722 > push_ssl = false
723 > allow_push = *
723 > allow_push = *
724 > EOF
724 > EOF
725
725
726 $ hg serve -R ../a -p $HGPORT2 -d --pid-file=../hg2.pid
726 $ hg serve -R ../a -p $HGPORT2 -d --pid-file=../hg2.pid
727 $ cat ../hg2.pid >> $DAEMON_PIDS
727 $ cat ../hg2.pid >> $DAEMON_PIDS
728
728
729 $ hg push http://localhost:$HGPORT2/
729 $ hg push http://localhost:$HGPORT2/
730 pushing to http://localhost:$HGPORT2/
730 pushing to http://localhost:$HGPORT2/
731 searching for changes
731 searching for changes
732 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
732 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
733 (merge or see 'hg help push' for details about pushing new heads)
733 (merge or see 'hg help push' for details about pushing new heads)
734 [255]
734 [255]
735 $ hg -R ../a book
735 $ hg -R ../a book
736 @ 1:0d2164f0ce0d
736 @ 1:0d2164f0ce0d
737 * X 1:0d2164f0ce0d
737 * X 1:0d2164f0ce0d
738 Y 3:f6fc62dde3c0
738 Y 3:f6fc62dde3c0
739 Z 1:0d2164f0ce0d
739 Z 1:0d2164f0ce0d
740
740
741
741
742 Unrelated marker does not alter the decision
742 Unrelated marker does not alter the decision
743
743
744 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
744 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
745 $ hg push http://localhost:$HGPORT2/
745 $ hg push http://localhost:$HGPORT2/
746 pushing to http://localhost:$HGPORT2/
746 pushing to http://localhost:$HGPORT2/
747 searching for changes
747 searching for changes
748 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
748 abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
749 (merge or see 'hg help push' for details about pushing new heads)
749 (merge or see 'hg help push' for details about pushing new heads)
750 [255]
750 [255]
751 $ hg -R ../a book
751 $ hg -R ../a book
752 @ 1:0d2164f0ce0d
752 @ 1:0d2164f0ce0d
753 * X 1:0d2164f0ce0d
753 * X 1:0d2164f0ce0d
754 Y 3:f6fc62dde3c0
754 Y 3:f6fc62dde3c0
755 Z 1:0d2164f0ce0d
755 Z 1:0d2164f0ce0d
756
756
757 Update to a successor works
757 Update to a successor works
758
758
759 $ hg id --debug -r 3
759 $ hg id --debug -r 3
760 f6fc62dde3c0771e29704af56ba4d8af77abcc2f
760 f6fc62dde3c0771e29704af56ba4d8af77abcc2f
761 $ hg id --debug -r 4
761 $ hg id --debug -r 4
762 4efff6d98829d9c824c621afd6e3f01865f5439f
762 4efff6d98829d9c824c621afd6e3f01865f5439f
763 $ hg id --debug -r 5
763 $ hg id --debug -r 5
764 c922c0139ca03858f655e4a2af4dd02796a63969 tip Y
764 c922c0139ca03858f655e4a2af4dd02796a63969 tip Y
765 $ hg debugobsolete f6fc62dde3c0771e29704af56ba4d8af77abcc2f cccccccccccccccccccccccccccccccccccccccc
765 $ hg debugobsolete f6fc62dde3c0771e29704af56ba4d8af77abcc2f cccccccccccccccccccccccccccccccccccccccc
766 obsoleted 1 changesets
766 obsoleted 1 changesets
767 $ hg debugobsolete cccccccccccccccccccccccccccccccccccccccc 4efff6d98829d9c824c621afd6e3f01865f5439f
767 $ hg debugobsolete cccccccccccccccccccccccccccccccccccccccc 4efff6d98829d9c824c621afd6e3f01865f5439f
768 $ hg push http://localhost:$HGPORT2/
768 $ hg push http://localhost:$HGPORT2/
769 pushing to http://localhost:$HGPORT2/
769 pushing to http://localhost:$HGPORT2/
770 searching for changes
770 searching for changes
771 remote: adding changesets
771 remote: adding changesets
772 remote: adding manifests
772 remote: adding manifests
773 remote: adding file changes
773 remote: adding file changes
774 remote: added 2 changesets with 2 changes to 1 files (+1 heads)
774 remote: added 2 changesets with 2 changes to 1 files (+1 heads)
775 remote: 2 new obsolescence markers
775 remote: 2 new obsolescence markers
776 remote: obsoleted 1 changesets
776 remote: obsoleted 1 changesets
777 updating bookmark Y
777 updating bookmark Y
778 $ hg -R ../a book
778 $ hg -R ../a book
779 @ 1:0d2164f0ce0d
779 @ 1:0d2164f0ce0d
780 * X 1:0d2164f0ce0d
780 * X 1:0d2164f0ce0d
781 Y 5:c922c0139ca0
781 Y 5:c922c0139ca0
782 Z 1:0d2164f0ce0d
782 Z 1:0d2164f0ce0d
783
783
784 hgweb
784 hgweb
785
785
786 $ cat <<EOF > .hg/hgrc
786 $ cat <<EOF > .hg/hgrc
787 > [web]
787 > [web]
788 > push_ssl = false
788 > push_ssl = false
789 > allow_push = *
789 > allow_push = *
790 > EOF
790 > EOF
791
791
792 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
792 $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log
793 $ cat ../hg.pid >> $DAEMON_PIDS
793 $ cat ../hg.pid >> $DAEMON_PIDS
794 $ cd ../a
794 $ cd ../a
795
795
796 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
796 $ hg debugpushkey http://localhost:$HGPORT/ namespaces
797 bookmarks
797 bookmarks
798 namespaces
798 namespaces
799 obsolete
799 obsolete
800 phases
800 phases
801 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
801 $ hg debugpushkey http://localhost:$HGPORT/ bookmarks
802 @ 9b140be1080824d768c5a4691a564088eede71f9
802 @ 9b140be1080824d768c5a4691a564088eede71f9
803 X 9b140be1080824d768c5a4691a564088eede71f9
803 X 9b140be1080824d768c5a4691a564088eede71f9
804 Y c922c0139ca03858f655e4a2af4dd02796a63969
804 Y c922c0139ca03858f655e4a2af4dd02796a63969
805 Z 9b140be1080824d768c5a4691a564088eede71f9
805 Z 9b140be1080824d768c5a4691a564088eede71f9
806 foo 0000000000000000000000000000000000000000
806 foo 0000000000000000000000000000000000000000
807 foobar 9b140be1080824d768c5a4691a564088eede71f9
807 foobar 9b140be1080824d768c5a4691a564088eede71f9
808 $ hg out -B http://localhost:$HGPORT/
808 $ hg out -B http://localhost:$HGPORT/
809 comparing with http://localhost:$HGPORT/
809 comparing with http://localhost:$HGPORT/
810 searching for changed bookmarks
810 searching for changed bookmarks
811 @ 0d2164f0ce0d
811 @ 0d2164f0ce0d
812 X 0d2164f0ce0d
812 X 0d2164f0ce0d
813 Z 0d2164f0ce0d
813 Z 0d2164f0ce0d
814 foo
814 foo
815 foobar
815 foobar
816 $ hg push -B Z http://localhost:$HGPORT/
816 $ hg push -B Z http://localhost:$HGPORT/
817 pushing to http://localhost:$HGPORT/
817 pushing to http://localhost:$HGPORT/
818 searching for changes
818 searching for changes
819 no changes found
819 no changes found
820 updating bookmark Z
820 updating bookmark Z
821 [1]
821 [1]
822 $ hg book -d Z
822 $ hg book -d Z
823 $ hg in -B http://localhost:$HGPORT/
823 $ hg in -B http://localhost:$HGPORT/
824 comparing with http://localhost:$HGPORT/
824 comparing with http://localhost:$HGPORT/
825 searching for changed bookmarks
825 searching for changed bookmarks
826 @ 9b140be10808
826 @ 9b140be10808
827 X 9b140be10808
827 X 9b140be10808
828 Z 0d2164f0ce0d
828 Z 0d2164f0ce0d
829 foo 000000000000
829 foo 000000000000
830 foobar 9b140be10808
830 foobar 9b140be10808
831 $ hg pull -B Z http://localhost:$HGPORT/
831 $ hg pull -B Z http://localhost:$HGPORT/
832 pulling from http://localhost:$HGPORT/
832 pulling from http://localhost:$HGPORT/
833 no changes found
833 no changes found
834 divergent bookmark @ stored as @1
834 divergent bookmark @ stored as @1
835 divergent bookmark X stored as X@1
835 divergent bookmark X stored as X@1
836 adding remote bookmark Z
836 adding remote bookmark Z
837 adding remote bookmark foo
837 adding remote bookmark foo
838 adding remote bookmark foobar
838 adding remote bookmark foobar
839 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
839 $ hg clone http://localhost:$HGPORT/ cloned-bookmarks
840 requesting all changes
840 requesting all changes
841 adding changesets
841 adding changesets
842 adding manifests
842 adding manifests
843 adding file changes
843 adding file changes
844 added 5 changesets with 5 changes to 3 files (+2 heads)
844 added 5 changesets with 5 changes to 3 files (+2 heads)
845 2 new obsolescence markers
845 2 new obsolescence markers
846 new changesets 4e3505fd9583:c922c0139ca0 (5 drafts)
846 new changesets 4e3505fd9583:c922c0139ca0 (5 drafts)
847 updating to bookmark @
847 updating to bookmark @
848 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
848 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
849 $ hg -R cloned-bookmarks bookmarks
849 $ hg -R cloned-bookmarks bookmarks
850 * @ 1:9b140be10808
850 * @ 1:9b140be10808
851 X 1:9b140be10808
851 X 1:9b140be10808
852 Y 4:c922c0139ca0
852 Y 4:c922c0139ca0
853 Z 2:0d2164f0ce0d
853 Z 2:0d2164f0ce0d
854 foo -1:000000000000
854 foo -1:000000000000
855 foobar 1:9b140be10808
855 foobar 1:9b140be10808
856
856
857 $ cd ..
857 $ cd ..
858
858
859 Test to show result of bookmarks comparison
859 Test to show result of bookmarks comparison
860
860
861 $ mkdir bmcomparison
861 $ mkdir bmcomparison
862 $ cd bmcomparison
862 $ cd bmcomparison
863
863
864 $ hg init source
864 $ hg init source
865 $ hg -R source debugbuilddag '+2*2*3*4'
865 $ hg -R source debugbuilddag '+2*2*3*4'
866 $ hg -R source log -G --template '{rev}:{node|short}'
866 $ hg -R source log -G --template '{rev}:{node|short}'
867 o 4:e7bd5218ca15
867 o 4:e7bd5218ca15
868 |
868 |
869 | o 3:6100d3090acf
869 | o 3:6100d3090acf
870 |/
870 |/
871 | o 2:fa942426a6fd
871 | o 2:fa942426a6fd
872 |/
872 |/
873 | o 1:66f7d451a68b
873 | o 1:66f7d451a68b
874 |/
874 |/
875 o 0:1ea73414a91b
875 o 0:1ea73414a91b
876
876
877 $ hg -R source bookmarks -r 0 SAME
877 $ hg -R source bookmarks -r 0 SAME
878 $ hg -R source bookmarks -r 0 ADV_ON_REPO1
878 $ hg -R source bookmarks -r 0 ADV_ON_REPO1
879 $ hg -R source bookmarks -r 0 ADV_ON_REPO2
879 $ hg -R source bookmarks -r 0 ADV_ON_REPO2
880 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO1
880 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO1
881 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO2
881 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO2
882 $ hg -R source bookmarks -r 1 DIVERGED
882 $ hg -R source bookmarks -r 1 DIVERGED
883
883
884 $ hg clone -U source repo1
884 $ hg clone -U source repo1
885
885
886 (test that incoming/outgoing exit with 1, if there is no bookmark to
886 (test that incoming/outgoing exit with 1, if there is no bookmark to
887 be exchanged)
887 be exchanged)
888
888
889 $ hg -R repo1 incoming -B
889 $ hg -R repo1 incoming -B
890 comparing with $TESTTMP/bmcomparison/source
890 comparing with $TESTTMP/bmcomparison/source
891 searching for changed bookmarks
891 searching for changed bookmarks
892 no changed bookmarks found
892 no changed bookmarks found
893 [1]
893 [1]
894 $ hg -R repo1 outgoing -B
894 $ hg -R repo1 outgoing -B
895 comparing with $TESTTMP/bmcomparison/source
895 comparing with $TESTTMP/bmcomparison/source
896 searching for changed bookmarks
896 searching for changed bookmarks
897 no changed bookmarks found
897 no changed bookmarks found
898 [1]
898 [1]
899
899
900 $ hg -R repo1 bookmarks -f -r 1 ADD_ON_REPO1
900 $ hg -R repo1 bookmarks -f -r 1 ADD_ON_REPO1
901 $ hg -R repo1 bookmarks -f -r 2 ADV_ON_REPO1
901 $ hg -R repo1 bookmarks -f -r 2 ADV_ON_REPO1
902 $ hg -R repo1 bookmarks -f -r 3 DIFF_ADV_ON_REPO1
902 $ hg -R repo1 bookmarks -f -r 3 DIFF_ADV_ON_REPO1
903 $ hg -R repo1 bookmarks -f -r 3 DIFF_DIVERGED
903 $ hg -R repo1 bookmarks -f -r 3 DIFF_DIVERGED
904 $ hg -R repo1 -q --config extensions.mq= strip 4
904 $ hg -R repo1 -q --config extensions.mq= strip 4
905 $ hg -R repo1 log -G --template '{node|short} ({bookmarks})'
905 $ hg -R repo1 log -G --template '{node|short} ({bookmarks})'
906 o 6100d3090acf (DIFF_ADV_ON_REPO1 DIFF_DIVERGED)
906 o 6100d3090acf (DIFF_ADV_ON_REPO1 DIFF_DIVERGED)
907 |
907 |
908 | o fa942426a6fd (ADV_ON_REPO1)
908 | o fa942426a6fd (ADV_ON_REPO1)
909 |/
909 |/
910 | o 66f7d451a68b (ADD_ON_REPO1 DIVERGED)
910 | o 66f7d451a68b (ADD_ON_REPO1 DIVERGED)
911 |/
911 |/
912 o 1ea73414a91b (ADV_ON_REPO2 DIFF_ADV_ON_REPO2 SAME)
912 o 1ea73414a91b (ADV_ON_REPO2 DIFF_ADV_ON_REPO2 SAME)
913
913
914
914
915 $ hg clone -U source repo2
915 $ hg clone -U source repo2
916 $ hg -R repo2 bookmarks -f -r 1 ADD_ON_REPO2
916 $ hg -R repo2 bookmarks -f -r 1 ADD_ON_REPO2
917 $ hg -R repo2 bookmarks -f -r 1 ADV_ON_REPO2
917 $ hg -R repo2 bookmarks -f -r 1 ADV_ON_REPO2
918 $ hg -R repo2 bookmarks -f -r 2 DIVERGED
918 $ hg -R repo2 bookmarks -f -r 2 DIVERGED
919 $ hg -R repo2 bookmarks -f -r 4 DIFF_ADV_ON_REPO2
919 $ hg -R repo2 bookmarks -f -r 4 DIFF_ADV_ON_REPO2
920 $ hg -R repo2 bookmarks -f -r 4 DIFF_DIVERGED
920 $ hg -R repo2 bookmarks -f -r 4 DIFF_DIVERGED
921 $ hg -R repo2 -q --config extensions.mq= strip 3
921 $ hg -R repo2 -q --config extensions.mq= strip 3
922 $ hg -R repo2 log -G --template '{node|short} ({bookmarks})'
922 $ hg -R repo2 log -G --template '{node|short} ({bookmarks})'
923 o e7bd5218ca15 (DIFF_ADV_ON_REPO2 DIFF_DIVERGED)
923 o e7bd5218ca15 (DIFF_ADV_ON_REPO2 DIFF_DIVERGED)
924 |
924 |
925 | o fa942426a6fd (DIVERGED)
925 | o fa942426a6fd (DIVERGED)
926 |/
926 |/
927 | o 66f7d451a68b (ADD_ON_REPO2 ADV_ON_REPO2)
927 | o 66f7d451a68b (ADD_ON_REPO2 ADV_ON_REPO2)
928 |/
928 |/
929 o 1ea73414a91b (ADV_ON_REPO1 DIFF_ADV_ON_REPO1 SAME)
929 o 1ea73414a91b (ADV_ON_REPO1 DIFF_ADV_ON_REPO1 SAME)
930
930
931
931
932 (test that difference of bookmarks between repositories are fully shown)
932 (test that difference of bookmarks between repositories are fully shown)
933
933
934 $ hg -R repo1 incoming -B repo2 -v
934 $ hg -R repo1 incoming -B repo2 -v
935 comparing with repo2
935 comparing with repo2
936 searching for changed bookmarks
936 searching for changed bookmarks
937 ADD_ON_REPO2 66f7d451a68b added
937 ADD_ON_REPO2 66f7d451a68b added
938 ADV_ON_REPO2 66f7d451a68b advanced
938 ADV_ON_REPO2 66f7d451a68b advanced
939 DIFF_ADV_ON_REPO2 e7bd5218ca15 changed
939 DIFF_ADV_ON_REPO2 e7bd5218ca15 changed
940 DIFF_DIVERGED e7bd5218ca15 changed
940 DIFF_DIVERGED e7bd5218ca15 changed
941 DIVERGED fa942426a6fd diverged
941 DIVERGED fa942426a6fd diverged
942 $ hg -R repo1 outgoing -B repo2 -v
942 $ hg -R repo1 outgoing -B repo2 -v
943 comparing with repo2
943 comparing with repo2
944 searching for changed bookmarks
944 searching for changed bookmarks
945 ADD_ON_REPO1 66f7d451a68b added
945 ADD_ON_REPO1 66f7d451a68b added
946 ADD_ON_REPO2 deleted
946 ADD_ON_REPO2 deleted
947 ADV_ON_REPO1 fa942426a6fd advanced
947 ADV_ON_REPO1 fa942426a6fd advanced
948 DIFF_ADV_ON_REPO1 6100d3090acf advanced
948 DIFF_ADV_ON_REPO1 6100d3090acf advanced
949 DIFF_ADV_ON_REPO2 1ea73414a91b changed
949 DIFF_ADV_ON_REPO2 1ea73414a91b changed
950 DIFF_DIVERGED 6100d3090acf changed
950 DIFF_DIVERGED 6100d3090acf changed
951 DIVERGED 66f7d451a68b diverged
951 DIVERGED 66f7d451a68b diverged
952
952
953 $ hg -R repo2 incoming -B repo1 -v
953 $ hg -R repo2 incoming -B repo1 -v
954 comparing with repo1
954 comparing with repo1
955 searching for changed bookmarks
955 searching for changed bookmarks
956 ADD_ON_REPO1 66f7d451a68b added
956 ADD_ON_REPO1 66f7d451a68b added
957 ADV_ON_REPO1 fa942426a6fd advanced
957 ADV_ON_REPO1 fa942426a6fd advanced
958 DIFF_ADV_ON_REPO1 6100d3090acf changed
958 DIFF_ADV_ON_REPO1 6100d3090acf changed
959 DIFF_DIVERGED 6100d3090acf changed
959 DIFF_DIVERGED 6100d3090acf changed
960 DIVERGED 66f7d451a68b diverged
960 DIVERGED 66f7d451a68b diverged
961 $ hg -R repo2 outgoing -B repo1 -v
961 $ hg -R repo2 outgoing -B repo1 -v
962 comparing with repo1
962 comparing with repo1
963 searching for changed bookmarks
963 searching for changed bookmarks
964 ADD_ON_REPO1 deleted
964 ADD_ON_REPO1 deleted
965 ADD_ON_REPO2 66f7d451a68b added
965 ADD_ON_REPO2 66f7d451a68b added
966 ADV_ON_REPO2 66f7d451a68b advanced
966 ADV_ON_REPO2 66f7d451a68b advanced
967 DIFF_ADV_ON_REPO1 1ea73414a91b changed
967 DIFF_ADV_ON_REPO1 1ea73414a91b changed
968 DIFF_ADV_ON_REPO2 e7bd5218ca15 advanced
968 DIFF_ADV_ON_REPO2 e7bd5218ca15 advanced
969 DIFF_DIVERGED e7bd5218ca15 changed
969 DIFF_DIVERGED e7bd5218ca15 changed
970 DIVERGED fa942426a6fd diverged
970 DIVERGED fa942426a6fd diverged
971
971
972 $ cd ..
972 $ cd ..
973
973
974 Pushing a bookmark should only push the changes required by that
974 Pushing a bookmark should only push the changes required by that
975 bookmark, not all outgoing changes:
975 bookmark, not all outgoing changes:
976 $ hg clone http://localhost:$HGPORT/ addmarks
976 $ hg clone http://localhost:$HGPORT/ addmarks
977 requesting all changes
977 requesting all changes
978 adding changesets
978 adding changesets
979 adding manifests
979 adding manifests
980 adding file changes
980 adding file changes
981 added 5 changesets with 5 changes to 3 files (+2 heads)
981 added 5 changesets with 5 changes to 3 files (+2 heads)
982 2 new obsolescence markers
982 2 new obsolescence markers
983 new changesets 4e3505fd9583:c922c0139ca0 (5 drafts)
983 new changesets 4e3505fd9583:c922c0139ca0 (5 drafts)
984 updating to bookmark @
984 updating to bookmark @
985 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
985 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
986 $ cd addmarks
986 $ cd addmarks
987 $ echo foo > foo
987 $ echo foo > foo
988 $ hg add foo
988 $ hg add foo
989 $ hg commit -m 'add foo'
989 $ hg commit -m 'add foo'
990 $ echo bar > bar
990 $ echo bar > bar
991 $ hg add bar
991 $ hg add bar
992 $ hg commit -m 'add bar'
992 $ hg commit -m 'add bar'
993 $ hg co "tip^"
993 $ hg co "tip^"
994 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
994 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
995 (leaving bookmark @)
995 (leaving bookmark @)
996 $ hg book add-foo
996 $ hg book add-foo
997 $ hg book -r tip add-bar
997 $ hg book -r tip add-bar
998 Note: this push *must* push only a single changeset, as that's the point
998 Note: this push *must* push only a single changeset, as that's the point
999 of this test.
999 of this test.
1000 $ hg push -B add-foo --traceback
1000 $ hg push -B add-foo --traceback
1001 pushing to http://localhost:$HGPORT/
1001 pushing to http://localhost:$HGPORT/
1002 searching for changes
1002 searching for changes
1003 remote: adding changesets
1003 remote: adding changesets
1004 remote: adding manifests
1004 remote: adding manifests
1005 remote: adding file changes
1005 remote: adding file changes
1006 remote: added 1 changesets with 1 changes to 1 files
1006 remote: added 1 changesets with 1 changes to 1 files
1007 exporting bookmark add-foo
1007 exporting bookmark add-foo
1008
1008
1009 pushing a new bookmark on a new head does not require -f if -B is specified
1009 pushing a new bookmark on a new head does not require -f if -B is specified
1010
1010
1011 $ hg up -q X
1011 $ hg up -q X
1012 $ hg book W
1012 $ hg book W
1013 $ echo c5 > f2
1013 $ echo c5 > f2
1014 $ hg ci -Am5
1014 $ hg ci -Am5
1015 created new head
1015 created new head
1016 $ hg push -B .
1016 $ hg push -B .
1017 pushing to http://localhost:$HGPORT/
1017 pushing to http://localhost:$HGPORT/
1018 searching for changes
1018 searching for changes
1019 remote: adding changesets
1019 remote: adding changesets
1020 remote: adding manifests
1020 remote: adding manifests
1021 remote: adding file changes
1021 remote: adding file changes
1022 remote: added 1 changesets with 1 changes to 1 files (+1 heads)
1022 remote: added 1 changesets with 1 changes to 1 files (+1 heads)
1023 exporting bookmark W
1023 exporting bookmark W
1024 $ hg -R ../b id -r W
1024 $ hg -R ../b id -r W
1025 cc978a373a53 tip W
1025 cc978a373a53 tip W
1026
1026
1027 pushing an existing but divergent bookmark with -B still requires -f
1027 pushing an existing but divergent bookmark with -B still requires -f
1028
1028
1029 $ hg clone -q . ../r
1029 $ hg clone -q . ../r
1030 $ hg up -q X
1030 $ hg up -q X
1031 $ echo 1 > f2
1031 $ echo 1 > f2
1032 $ hg ci -qAml
1032 $ hg ci -qAml
1033
1033
1034 $ cd ../r
1034 $ cd ../r
1035 $ hg up -q X
1035 $ hg up -q X
1036 $ echo 2 > f2
1036 $ echo 2 > f2
1037 $ hg ci -qAmr
1037 $ hg ci -qAmr
1038 $ hg push -B X
1038 $ hg push -B X
1039 pushing to $TESTTMP/addmarks
1039 pushing to $TESTTMP/addmarks
1040 searching for changes
1040 searching for changes
1041 remote has heads on branch 'default' that are not known locally: a2a606d9ff1b
1041 remote has heads on branch 'default' that are not known locally: a2a606d9ff1b
1042 abort: push creates new remote head 54694f811df9 with bookmark 'X'!
1042 abort: push creates new remote head 54694f811df9 with bookmark 'X'!
1043 (pull and merge or see 'hg help push' for details about pushing new heads)
1043 (pull and merge or see 'hg help push' for details about pushing new heads)
1044 [255]
1044 [255]
1045 $ cd ../addmarks
1045 $ cd ../addmarks
1046
1046
1047 Check summary output for incoming/outgoing bookmarks
1047 Check summary output for incoming/outgoing bookmarks
1048
1048
1049 $ hg bookmarks -d X
1049 $ hg bookmarks -d X
1050 $ hg bookmarks -d Y
1050 $ hg bookmarks -d Y
1051 $ hg summary --remote | grep '^remote:'
1051 $ hg summary --remote | grep '^remote:'
1052 remote: *, 2 incoming bookmarks, 1 outgoing bookmarks (glob)
1052 remote: *, 2 incoming bookmarks, 1 outgoing bookmarks (glob)
1053
1053
1054 $ cd ..
1054 $ cd ..
1055
1055
1056 pushing an unchanged bookmark should result in no changes
1056 pushing an unchanged bookmark should result in no changes
1057
1057
1058 $ hg init unchanged-a
1058 $ hg init unchanged-a
1059 $ hg init unchanged-b
1059 $ hg init unchanged-b
1060 $ cd unchanged-a
1060 $ cd unchanged-a
1061 $ echo initial > foo
1061 $ echo initial > foo
1062 $ hg commit -A -m initial
1062 $ hg commit -A -m initial
1063 adding foo
1063 adding foo
1064 $ hg bookmark @
1064 $ hg bookmark @
1065 $ hg push -B @ ../unchanged-b
1065 $ hg push -B @ ../unchanged-b
1066 pushing to ../unchanged-b
1066 pushing to ../unchanged-b
1067 searching for changes
1067 searching for changes
1068 adding changesets
1068 adding changesets
1069 adding manifests
1069 adding manifests
1070 adding file changes
1070 adding file changes
1071 added 1 changesets with 1 changes to 1 files
1071 added 1 changesets with 1 changes to 1 files
1072 exporting bookmark @
1072 exporting bookmark @
1073
1073
1074 $ hg push -B @ ../unchanged-b
1074 $ hg push -B @ ../unchanged-b
1075 pushing to ../unchanged-b
1075 pushing to ../unchanged-b
1076 searching for changes
1076 searching for changes
1077 no changes found
1077 no changes found
1078 [1]
1078 [1]
1079
1079
1080 Pushing a really long bookmark should work fine (issue5165)
1080 Pushing a really long bookmark should work fine (issue5165)
1081 ===============================================
1081 ===============================================
1082
1082
1083 #if b2-binary
1083 #if b2-binary
1084 >>> with open('longname', 'w') as f:
1084 >>> with open('longname', 'w') as f:
1085 ... f.write('wat' * 100) and None
1085 ... f.write('wat' * 100) and None
1086 $ hg book `cat longname`
1086 $ hg book `cat longname`
1087 $ hg push -B `cat longname` ../unchanged-b
1087 $ hg push -B `cat longname` ../unchanged-b
1088 pushing to ../unchanged-b
1088 pushing to ../unchanged-b
1089 searching for changes
1089 searching for changes
1090 no changes found
1090 no changes found
1091 exporting bookmark (wat){100} (re)
1091 exporting bookmark (wat){100} (re)
1092 [1]
1092 [1]
1093 $ hg -R ../unchanged-b book --delete `cat longname`
1093 $ hg -R ../unchanged-b book --delete `cat longname`
1094
1094
1095 Test again but forcing bundle2 exchange to make sure that doesn't regress.
1095 Test again but forcing bundle2 exchange to make sure that doesn't regress.
1096
1096
1097 $ hg push -B `cat longname` ../unchanged-b --config devel.legacy.exchange=bundle1
1097 $ hg push -B `cat longname` ../unchanged-b --config devel.legacy.exchange=bundle1
1098 pushing to ../unchanged-b
1098 pushing to ../unchanged-b
1099 searching for changes
1099 searching for changes
1100 no changes found
1100 no changes found
1101 exporting bookmark (wat){100} (re)
1101 exporting bookmark (wat){100} (re)
1102 [1]
1102 [1]
1103 $ hg -R ../unchanged-b book --delete `cat longname`
1103 $ hg -R ../unchanged-b book --delete `cat longname`
1104 $ hg book --delete `cat longname`
1104 $ hg book --delete `cat longname`
1105 $ hg co @
1105 $ hg co @
1106 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1106 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
1107 (activating bookmark @)
1107 (activating bookmark @)
1108 #endif
1108 #endif
1109
1109
1110 Check hook preventing push (issue4455)
1110 Check hook preventing push (issue4455)
1111 ======================================
1111 ======================================
1112
1112
1113 $ hg bookmarks
1113 $ hg bookmarks
1114 * @ 0:55482a6fb4b1
1114 * @ 0:55482a6fb4b1
1115 $ hg log -G
1115 $ hg log -G
1116 @ 0:55482a6fb4b1 initial
1116 @ 0:55482a6fb4b1 initial
1117
1117
1118 $ hg init ../issue4455-dest
1118 $ hg init ../issue4455-dest
1119 $ hg push ../issue4455-dest # changesets only
1119 $ hg push ../issue4455-dest # changesets only
1120 pushing to ../issue4455-dest
1120 pushing to ../issue4455-dest
1121 searching for changes
1121 searching for changes
1122 adding changesets
1122 adding changesets
1123 adding manifests
1123 adding manifests
1124 adding file changes
1124 adding file changes
1125 added 1 changesets with 1 changes to 1 files
1125 added 1 changesets with 1 changes to 1 files
1126 $ cat >> .hg/hgrc << EOF
1126 $ cat >> .hg/hgrc << EOF
1127 > [paths]
1127 > [paths]
1128 > local=../issue4455-dest/
1128 > local=../issue4455-dest/
1129 > ssh=ssh://user@dummy/issue4455-dest
1129 > ssh=ssh://user@dummy/issue4455-dest
1130 > http=http://localhost:$HGPORT/
1130 > http=http://localhost:$HGPORT/
1131 > [ui]
1131 > [ui]
1132 > ssh="$PYTHON" "$TESTDIR/dummyssh"
1132 > ssh="$PYTHON" "$TESTDIR/dummyssh"
1133 > EOF
1133 > EOF
1134 $ cat >> ../issue4455-dest/.hg/hgrc << EOF
1134 $ cat >> ../issue4455-dest/.hg/hgrc << EOF
1135 > [hooks]
1135 > [hooks]
1136 > prepushkey=false
1136 > prepushkey=false
1137 > [web]
1137 > [web]
1138 > push_ssl = false
1138 > push_ssl = false
1139 > allow_push = *
1139 > allow_push = *
1140 > EOF
1140 > EOF
1141 $ killdaemons.py
1141 $ killdaemons.py
1142 $ hg serve -R ../issue4455-dest -p $HGPORT -d --pid-file=../issue4455.pid -E ../issue4455-error.log
1142 $ hg serve -R ../issue4455-dest -p $HGPORT -d --pid-file=../issue4455.pid -E ../issue4455-error.log
1143 $ cat ../issue4455.pid >> $DAEMON_PIDS
1143 $ cat ../issue4455.pid >> $DAEMON_PIDS
1144
1144
1145 Local push
1145 Local push
1146 ----------
1146 ----------
1147
1147
1148 #if b2-pushkey
1148 #if b2-pushkey
1149
1149
1150 $ hg push -B @ local
1150 $ hg push -B @ local
1151 pushing to $TESTTMP/issue4455-dest
1151 pushing to $TESTTMP/issue4455-dest
1152 searching for changes
1152 searching for changes
1153 no changes found
1153 no changes found
1154 pushkey-abort: prepushkey hook exited with status 1
1154 pushkey-abort: prepushkey hook exited with status 1
1155 abort: exporting bookmark @ failed!
1155 abort: exporting bookmark @ failed!
1156 [255]
1156 [255]
1157
1157
1158 #endif
1158 #endif
1159 #if b2-binary
1159 #if b2-binary
1160
1160
1161 $ hg push -B @ local
1161 $ hg push -B @ local
1162 pushing to $TESTTMP/issue4455-dest
1162 pushing to $TESTTMP/issue4455-dest
1163 searching for changes
1163 searching for changes
1164 no changes found
1164 no changes found
1165 abort: prepushkey hook exited with status 1
1165 abort: prepushkey hook exited with status 1
1166 [255]
1166 [255]
1167
1167
1168 #endif
1168 #endif
1169
1169
1170 $ hg -R ../issue4455-dest/ bookmarks
1170 $ hg -R ../issue4455-dest/ bookmarks
1171 no bookmarks set
1171 no bookmarks set
1172
1172
1173 Using ssh
1173 Using ssh
1174 ---------
1174 ---------
1175
1175
1176 #if b2-pushkey
1176 #if b2-pushkey
1177
1177
1178 $ hg push -B @ ssh # bundle2+
1178 $ hg push -B @ ssh # bundle2+
1179 pushing to ssh://user@dummy/issue4455-dest
1179 pushing to ssh://user@dummy/issue4455-dest
1180 searching for changes
1180 searching for changes
1181 no changes found
1181 no changes found
1182 remote: pushkey-abort: prepushkey hook exited with status 1
1182 remote: pushkey-abort: prepushkey hook exited with status 1
1183 abort: exporting bookmark @ failed!
1183 abort: exporting bookmark @ failed!
1184 [255]
1184 [255]
1185
1185
1186 $ hg -R ../issue4455-dest/ bookmarks
1186 $ hg -R ../issue4455-dest/ bookmarks
1187 no bookmarks set
1187 no bookmarks set
1188
1188
1189 $ hg push -B @ ssh --config devel.legacy.exchange=bundle1
1189 $ hg push -B @ ssh --config devel.legacy.exchange=bundle1
1190 pushing to ssh://user@dummy/issue4455-dest
1190 pushing to ssh://user@dummy/issue4455-dest
1191 searching for changes
1191 searching for changes
1192 no changes found
1192 no changes found
1193 remote: pushkey-abort: prepushkey hook exited with status 1
1193 remote: pushkey-abort: prepushkey hook exited with status 1
1194 exporting bookmark @ failed!
1194 exporting bookmark @ failed!
1195 [1]
1195 [1]
1196
1196
1197 #endif
1197 #endif
1198 #if b2-binary
1198 #if b2-binary
1199
1199
1200 $ hg push -B @ ssh # bundle2+
1200 $ hg push -B @ ssh # bundle2+
1201 pushing to ssh://user@dummy/issue4455-dest
1201 pushing to ssh://user@dummy/issue4455-dest
1202 searching for changes
1202 searching for changes
1203 no changes found
1203 no changes found
1204 remote: prepushkey hook exited with status 1
1204 remote: prepushkey hook exited with status 1
1205 abort: push failed on remote
1205 abort: push failed on remote
1206 [255]
1206 [255]
1207
1207
1208 #endif
1208 #endif
1209
1209
1210 $ hg -R ../issue4455-dest/ bookmarks
1210 $ hg -R ../issue4455-dest/ bookmarks
1211 no bookmarks set
1211 no bookmarks set
1212
1212
1213 Using http
1213 Using http
1214 ----------
1214 ----------
1215
1215
1216 #if b2-pushkey
1216 #if b2-pushkey
1217 $ hg push -B @ http # bundle2+
1217 $ hg push -B @ http # bundle2+
1218 pushing to http://localhost:$HGPORT/
1218 pushing to http://localhost:$HGPORT/
1219 searching for changes
1219 searching for changes
1220 no changes found
1220 no changes found
1221 remote: pushkey-abort: prepushkey hook exited with status 1
1221 remote: pushkey-abort: prepushkey hook exited with status 1
1222 abort: exporting bookmark @ failed!
1222 abort: exporting bookmark @ failed!
1223 [255]
1223 [255]
1224
1224
1225 $ hg -R ../issue4455-dest/ bookmarks
1225 $ hg -R ../issue4455-dest/ bookmarks
1226 no bookmarks set
1226 no bookmarks set
1227
1227
1228 $ hg push -B @ http --config devel.legacy.exchange=bundle1
1228 $ hg push -B @ http --config devel.legacy.exchange=bundle1
1229 pushing to http://localhost:$HGPORT/
1229 pushing to http://localhost:$HGPORT/
1230 searching for changes
1230 searching for changes
1231 no changes found
1231 no changes found
1232 remote: pushkey-abort: prepushkey hook exited with status 1
1232 remote: pushkey-abort: prepushkey hook exited with status 1
1233 exporting bookmark @ failed!
1233 exporting bookmark @ failed!
1234 [1]
1234 [1]
1235
1235
1236 #endif
1236 #endif
1237
1237
1238 #if b2-binary
1238 #if b2-binary
1239
1239
1240 $ hg push -B @ ssh # bundle2+
1240 $ hg push -B @ ssh # bundle2+
1241 pushing to ssh://user@dummy/issue4455-dest
1241 pushing to ssh://user@dummy/issue4455-dest
1242 searching for changes
1242 searching for changes
1243 no changes found
1243 no changes found
1244 remote: prepushkey hook exited with status 1
1244 remote: prepushkey hook exited with status 1
1245 abort: push failed on remote
1245 abort: push failed on remote
1246 [255]
1246 [255]
1247
1247
1248 #endif
1248 #endif
1249
1249
1250 $ hg -R ../issue4455-dest/ bookmarks
1250 $ hg -R ../issue4455-dest/ bookmarks
1251 no bookmarks set
1251 no bookmarks set
1252
1252
1253 $ cd ..
1253 $ cd ..
1254
1254
1255 Test that pre-pushkey compat for bookmark works as expected (issue5777)
1255 Test that pre-pushkey compat for bookmark works as expected (issue5777)
1256
1256
1257 $ cat << EOF >> $HGRCPATH
1257 $ cat << EOF >> $HGRCPATH
1258 > [ui]
1258 > [ui]
1259 > ssh="$PYTHON" "$TESTDIR/dummyssh"
1259 > ssh="$PYTHON" "$TESTDIR/dummyssh"
1260 > [server]
1260 > [server]
1261 > bookmarks-pushkey-compat = yes
1261 > bookmarks-pushkey-compat = yes
1262 > EOF
1262 > EOF
1263
1263
1264 $ hg init server
1264 $ hg init server
1265 $ echo foo > server/a
1265 $ echo foo > server/a
1266 $ hg -R server book foo
1266 $ hg -R server book foo
1267 $ hg -R server commit -Am a
1267 $ hg -R server commit -Am a
1268 adding a
1268 adding a
1269 $ hg clone ssh://user@dummy/server client
1269 $ hg clone ssh://user@dummy/server client
1270 requesting all changes
1270 requesting all changes
1271 adding changesets
1271 adding changesets
1272 adding manifests
1272 adding manifests
1273 adding file changes
1273 adding file changes
1274 added 1 changesets with 1 changes to 1 files
1274 added 1 changesets with 1 changes to 1 files
1275 new changesets 79513d0d7716 (1 drafts)
1275 new changesets 79513d0d7716 (1 drafts)
1276 updating to branch default
1276 updating to branch default
1277 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1277 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1278
1278
1279 Forbid bookmark move on the server
1279 Forbid bookmark move on the server
1280
1280
1281 $ cat << EOF >> $TESTTMP/no-bm-move.sh
1281 $ cat << EOF >> $TESTTMP/no-bm-move.sh
1282 > #!/bin/sh
1282 > #!/bin/sh
1283 > echo \$HG_NAMESPACE | grep -v bookmarks
1283 > echo \$HG_NAMESPACE | grep -v bookmarks
1284 > EOF
1284 > EOF
1285 $ cat << EOF >> server/.hg/hgrc
1285 $ cat << EOF >> server/.hg/hgrc
1286 > [hooks]
1286 > [hooks]
1287 > prepushkey.no-bm-move= sh $TESTTMP/no-bm-move.sh
1287 > prepushkey.no-bm-move= sh $TESTTMP/no-bm-move.sh
1288 > EOF
1288 > EOF
1289
1289
1290 pushing changeset is okay
1290 pushing changeset is okay
1291
1291
1292 $ echo bar >> client/a
1292 $ echo bar >> client/a
1293 $ hg -R client commit -m b
1293 $ hg -R client commit -m b
1294 $ hg -R client push
1294 $ hg -R client push
1295 pushing to ssh://user@dummy/server
1295 pushing to ssh://user@dummy/server
1296 searching for changes
1296 searching for changes
1297 remote: adding changesets
1297 remote: adding changesets
1298 remote: adding manifests
1298 remote: adding manifests
1299 remote: adding file changes
1299 remote: adding file changes
1300 remote: added 1 changesets with 1 changes to 1 files
1300 remote: added 1 changesets with 1 changes to 1 files
1301
1301
1302 attempt to move the bookmark is rejected
1302 attempt to move the bookmark is rejected
1303
1303
1304 $ hg -R client book foo -r .
1304 $ hg -R client book foo -r .
1305 moving bookmark 'foo' forward from 79513d0d7716
1305 moving bookmark 'foo' forward from 79513d0d7716
1306
1306
1307 #if b2-pushkey
1307 #if b2-pushkey
1308 $ hg -R client push
1308 $ hg -R client push
1309 pushing to ssh://user@dummy/server
1309 pushing to ssh://user@dummy/server
1310 searching for changes
1310 searching for changes
1311 no changes found
1311 no changes found
1312 remote: pushkey-abort: prepushkey.no-bm-move hook exited with status 1
1312 remote: pushkey-abort: prepushkey.no-bm-move hook exited with status 1
1313 abort: updating bookmark foo failed!
1313 abort: updating bookmark foo failed!
1314 [255]
1314 [255]
1315 #endif
1315 #endif
1316 #if b2-binary
1316 #if b2-binary
1317 $ hg -R client push
1317 $ hg -R client push
1318 pushing to ssh://user@dummy/server
1318 pushing to ssh://user@dummy/server
1319 searching for changes
1319 searching for changes
1320 no changes found
1320 no changes found
1321 remote: prepushkey.no-bm-move hook exited with status 1
1321 remote: prepushkey.no-bm-move hook exited with status 1
1322 abort: push failed on remote
1322 abort: push failed on remote
1323 [255]
1323 [255]
1324 #endif
1324 #endif
1325
1325
1326 -- test for pushing bookmarks pointing to secret changesets
1326 -- test for pushing bookmarks pointing to secret changesets
1327
1327
1328 Set up a "remote" repo
1328 Set up a "remote" repo
1329 $ hg init issue6159remote
1329 $ hg init issue6159remote
1330 $ cd issue6159remote
1330 $ cd issue6159remote
1331 $ echo a > a
1331 $ echo a > a
1332 $ hg add a
1332 $ hg add a
1333 $ hg commit -m_
1333 $ hg commit -m_
1334 $ hg bookmark foo
1334 $ hg bookmark foo
1335 $ cd ..
1335 $ cd ..
1336
1336
1337 Clone a local repo
1337 Clone a local repo
1338 $ hg clone -q issue6159remote issue6159local
1338 $ hg clone -q issue6159remote issue6159local
1339 $ cd issue6159local
1339 $ cd issue6159local
1340 $ hg up -qr foo
1340 $ hg up -qr foo
1341 $ echo b > b
1341 $ echo b > b
1342
1342
1343 Move the bookmark "foo" to point at a secret changeset
1343 Move the bookmark "foo" to point at a secret changeset
1344 $ hg commit -qAm_ --config phases.new-commit=secret
1344 $ hg commit -qAm_ --config phases.new-commit=secret
1345
1345
1346 Pushing the bookmark "foo" now fails as it contains a secret changeset
1346 Pushing the bookmark "foo" now fails as it contains a secret changeset
1347 #if b2-pushkey
1348 $ hg push -r foo
1349 pushing to $TESTTMP/issue6159remote
1350 searching for changes
1351 no changes found (ignored 1 secret changesets)
1352 abort: updating bookmark foo failed!
1353 [255]
1354 #endif
1355
1356 #if b2-binary
1357 $ hg push -r foo
1347 $ hg push -r foo
1358 pushing to $TESTTMP/issue6159remote
1348 pushing to $TESTTMP/issue6159remote
1359 searching for changes
1349 searching for changes
1360 no changes found (ignored 1 secret changesets)
1350 no changes found (ignored 1 secret changesets)
1361 updating bookmark foo
1351 abort: cannot push bookmark foo as it points to a secret changeset
1362 [1]
1363 #endif
1364
1365 Now the "remote" repo contains a bookmark pointing to a nonexistent revision
1366 $ cd ../issue6159remote
1367 #if b2-pushkey
1368 $ hg bookmark
1369 * foo 0:1599bc8b897a
1370 $ hg log -r 1599bc8b897a
1371 0:1599bc8b897a _ (no-eol)
1372 #endif
1373
1374 #if b2-binary
1375 $ hg bookmark
1376 no bookmarks set
1377 $ cat .hg/bookmarks
1378 cf489fd8a374cab73c2dc19e899bde6fe3a43f8f foo
1379 $ hg log -r cf489fd8a374
1380 abort: unknown revision 'cf489fd8a374'!
1381 [255]
1352 [255]
1382 #endif
General Comments 0
You need to be logged in to leave comments. Login now