push: continue without locking on lock failure other than EEXIST (issue5882)...
Yuya Nishihara
r38111:7c05198c stable
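
The change drops the errno/IOError handling around taking the source repository's locks during push: instead of re-raising any IOError whose errno is not EACCES, the code now catches error.LockUnavailable, so the push continues without local phase synchronisation on any lock failure other than EEXIST (an existing lock, i.e. real contention). Below is a minimal, self-contained sketch of that pattern; trylock() and push() are hypothetical stand-ins, and LockUnavailable here is a local placeholder for error.LockUnavailable, not the Mercurial implementation itself.

# A minimal sketch of the pattern, assuming hypothetical names (trylock,
# push); LockUnavailable below is a local stand-in for error.LockUnavailable.
# This is not the Mercurial implementation.
import errno
import os


class LockUnavailable(Exception):
    """Lock file could not be created for a reason other than EEXIST."""


def trylock(path):
    # Stand-in for repo.lock(): genuine contention (EEXIST) stays fatal,
    # any other failure (e.g. EACCES on a read-only repository) is turned
    # into LockUnavailable so the caller can degrade gracefully.
    try:
        return os.open(path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
    except OSError as err:
        if err.errno == errno.EEXIST:
            raise
        raise LockUnavailable(str(err))


def push(lockpath):
    fd = None
    try:
        fd = trylock(lockpath)
    except LockUnavailable as err:
        # Source repo cannot be locked: do not abort the push, just skip
        # the local phase synchronisation (the effect of the new except clause).
        print('cannot lock source repository: %s' % err)
    try:
        pass  # discovery, bundle2 push, bookmark/phase sync would run here
    finally:
        if fd is not None:
            os.close(fd)
            os.unlink(lockpath)
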
@@ -1,2421 +1,2418 @@
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
12 import hashlib
11 import hashlib
13
12
14 from .i18n import _
13 from .i18n import _
15 from .node import (
14 from .node import (
16 bin,
15 bin,
17 hex,
16 hex,
18 nullid,
17 nullid,
19 )
18 )
20 from .thirdparty import (
19 from .thirdparty import (
21 attr,
20 attr,
22 )
21 )
23 from . import (
22 from . import (
24 bookmarks as bookmod,
23 bookmarks as bookmod,
25 bundle2,
24 bundle2,
26 changegroup,
25 changegroup,
27 discovery,
26 discovery,
28 error,
27 error,
29 lock as lockmod,
28 lock as lockmod,
30 logexchange,
29 logexchange,
31 obsolete,
30 obsolete,
32 phases,
31 phases,
33 pushkey,
32 pushkey,
34 pycompat,
33 pycompat,
35 scmutil,
34 scmutil,
36 sslutil,
35 sslutil,
37 streamclone,
36 streamclone,
38 url as urlmod,
37 url as urlmod,
39 util,
38 util,
40 )
39 )
41 from .utils import (
40 from .utils import (
42 stringutil,
41 stringutil,
43 )
42 )
44
43
45 urlerr = util.urlerr
44 urlerr = util.urlerr
46 urlreq = util.urlreq
45 urlreq = util.urlreq
47
46
48 # Maps bundle version human names to changegroup versions.
47 # Maps bundle version human names to changegroup versions.
49 _bundlespeccgversions = {'v1': '01',
48 _bundlespeccgversions = {'v1': '01',
50 'v2': '02',
49 'v2': '02',
51 'packed1': 's1',
50 'packed1': 's1',
52 'bundle2': '02', #legacy
51 'bundle2': '02', #legacy
53 }
52 }
54
53
55 # Maps bundle version with content opts to choose which part to bundle
54 # Maps bundle version with content opts to choose which part to bundle
56 _bundlespeccontentopts = {
55 _bundlespeccontentopts = {
57 'v1': {
56 'v1': {
58 'changegroup': True,
57 'changegroup': True,
59 'cg.version': '01',
58 'cg.version': '01',
60 'obsolescence': False,
59 'obsolescence': False,
61 'phases': False,
60 'phases': False,
62 'tagsfnodescache': False,
61 'tagsfnodescache': False,
63 'revbranchcache': False
62 'revbranchcache': False
64 },
63 },
65 'v2': {
64 'v2': {
66 'changegroup': True,
65 'changegroup': True,
67 'cg.version': '02',
66 'cg.version': '02',
68 'obsolescence': False,
67 'obsolescence': False,
69 'phases': False,
68 'phases': False,
70 'tagsfnodescache': True,
69 'tagsfnodescache': True,
71 'revbranchcache': True
70 'revbranchcache': True
72 },
71 },
73 'packed1' : {
72 'packed1' : {
74 'cg.version': 's1'
73 'cg.version': 's1'
75 }
74 }
76 }
75 }
77 _bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
76 _bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
78
77
79 _bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
78 _bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
80 "tagsfnodescache": False,
79 "tagsfnodescache": False,
81 "revbranchcache": False}}
80 "revbranchcache": False}}
82
81
83 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
82 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
84 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
83 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
85
84
86 @attr.s
85 @attr.s
87 class bundlespec(object):
86 class bundlespec(object):
88 compression = attr.ib()
87 compression = attr.ib()
89 wirecompression = attr.ib()
88 wirecompression = attr.ib()
90 version = attr.ib()
89 version = attr.ib()
91 wireversion = attr.ib()
90 wireversion = attr.ib()
92 params = attr.ib()
91 params = attr.ib()
93 contentopts = attr.ib()
92 contentopts = attr.ib()
94
93
95 def parsebundlespec(repo, spec, strict=True):
94 def parsebundlespec(repo, spec, strict=True):
96 """Parse a bundle string specification into parts.
95 """Parse a bundle string specification into parts.
97
96
98 Bundle specifications denote a well-defined bundle/exchange format.
97 Bundle specifications denote a well-defined bundle/exchange format.
99 The content of a given specification should not change over time in
98 The content of a given specification should not change over time in
100 order to ensure that bundles produced by a newer version of Mercurial are
99 order to ensure that bundles produced by a newer version of Mercurial are
101 readable from an older version.
100 readable from an older version.
102
101
103 The string currently has the form:
102 The string currently has the form:
104
103
105 <compression>-<type>[;<parameter0>[;<parameter1>]]
104 <compression>-<type>[;<parameter0>[;<parameter1>]]
106
105
107 Where <compression> is one of the supported compression formats
106 Where <compression> is one of the supported compression formats
108 and <type> is (currently) a version string. A ";" can follow the type and
107 and <type> is (currently) a version string. A ";" can follow the type and
109 all text afterwards is interpreted as URI encoded, ";" delimited key=value
108 all text afterwards is interpreted as URI encoded, ";" delimited key=value
110 pairs.
109 pairs.
111
110
112 If ``strict`` is True (the default) <compression> is required. Otherwise,
111 If ``strict`` is True (the default) <compression> is required. Otherwise,
113 it is optional.
112 it is optional.
114
113
115 Returns a bundlespec object of (compression, version, parameters).
114 Returns a bundlespec object of (compression, version, parameters).
116 Compression will be ``None`` if not in strict mode and a compression isn't
115 Compression will be ``None`` if not in strict mode and a compression isn't
117 defined.
116 defined.
118
117
119 An ``InvalidBundleSpecification`` is raised when the specification is
118 An ``InvalidBundleSpecification`` is raised when the specification is
120 not syntactically well formed.
119 not syntactically well formed.
121
120
122 An ``UnsupportedBundleSpecification`` is raised when the compression or
121 An ``UnsupportedBundleSpecification`` is raised when the compression or
123 bundle type/version is not recognized.
122 bundle type/version is not recognized.
124
123
125 Note: this function will likely eventually return a more complex data
124 Note: this function will likely eventually return a more complex data
126 structure, including bundle2 part information.
125 structure, including bundle2 part information.
127 """
126 """
128 def parseparams(s):
127 def parseparams(s):
129 if ';' not in s:
128 if ';' not in s:
130 return s, {}
129 return s, {}
131
130
132 params = {}
131 params = {}
133 version, paramstr = s.split(';', 1)
132 version, paramstr = s.split(';', 1)
134
133
135 for p in paramstr.split(';'):
134 for p in paramstr.split(';'):
136 if '=' not in p:
135 if '=' not in p:
137 raise error.InvalidBundleSpecification(
136 raise error.InvalidBundleSpecification(
138 _('invalid bundle specification: '
137 _('invalid bundle specification: '
139 'missing "=" in parameter: %s') % p)
138 'missing "=" in parameter: %s') % p)
140
139
141 key, value = p.split('=', 1)
140 key, value = p.split('=', 1)
142 key = urlreq.unquote(key)
141 key = urlreq.unquote(key)
143 value = urlreq.unquote(value)
142 value = urlreq.unquote(value)
144 params[key] = value
143 params[key] = value
145
144
146 return version, params
145 return version, params
147
146
148
147
149 if strict and '-' not in spec:
148 if strict and '-' not in spec:
150 raise error.InvalidBundleSpecification(
149 raise error.InvalidBundleSpecification(
151 _('invalid bundle specification; '
150 _('invalid bundle specification; '
152 'must be prefixed with compression: %s') % spec)
151 'must be prefixed with compression: %s') % spec)
153
152
154 if '-' in spec:
153 if '-' in spec:
155 compression, version = spec.split('-', 1)
154 compression, version = spec.split('-', 1)
156
155
157 if compression not in util.compengines.supportedbundlenames:
156 if compression not in util.compengines.supportedbundlenames:
158 raise error.UnsupportedBundleSpecification(
157 raise error.UnsupportedBundleSpecification(
159 _('%s compression is not supported') % compression)
158 _('%s compression is not supported') % compression)
160
159
161 version, params = parseparams(version)
160 version, params = parseparams(version)
162
161
163 if version not in _bundlespeccgversions:
162 if version not in _bundlespeccgversions:
164 raise error.UnsupportedBundleSpecification(
163 raise error.UnsupportedBundleSpecification(
165 _('%s is not a recognized bundle version') % version)
164 _('%s is not a recognized bundle version') % version)
166 else:
165 else:
167 # Value could be just the compression or just the version, in which
166 # Value could be just the compression or just the version, in which
168 # case some defaults are assumed (but only when not in strict mode).
167 # case some defaults are assumed (but only when not in strict mode).
169 assert not strict
168 assert not strict
170
169
171 spec, params = parseparams(spec)
170 spec, params = parseparams(spec)
172
171
173 if spec in util.compengines.supportedbundlenames:
172 if spec in util.compengines.supportedbundlenames:
174 compression = spec
173 compression = spec
175 version = 'v1'
174 version = 'v1'
176 # Generaldelta repos require v2.
175 # Generaldelta repos require v2.
177 if 'generaldelta' in repo.requirements:
176 if 'generaldelta' in repo.requirements:
178 version = 'v2'
177 version = 'v2'
179 # Modern compression engines require v2.
178 # Modern compression engines require v2.
180 if compression not in _bundlespecv1compengines:
179 if compression not in _bundlespecv1compengines:
181 version = 'v2'
180 version = 'v2'
182 elif spec in _bundlespeccgversions:
181 elif spec in _bundlespeccgversions:
183 if spec == 'packed1':
182 if spec == 'packed1':
184 compression = 'none'
183 compression = 'none'
185 else:
184 else:
186 compression = 'bzip2'
185 compression = 'bzip2'
187 version = spec
186 version = spec
188 else:
187 else:
189 raise error.UnsupportedBundleSpecification(
188 raise error.UnsupportedBundleSpecification(
190 _('%s is not a recognized bundle specification') % spec)
189 _('%s is not a recognized bundle specification') % spec)
191
190
192 # Bundle version 1 only supports a known set of compression engines.
191 # Bundle version 1 only supports a known set of compression engines.
193 if version == 'v1' and compression not in _bundlespecv1compengines:
192 if version == 'v1' and compression not in _bundlespecv1compengines:
194 raise error.UnsupportedBundleSpecification(
193 raise error.UnsupportedBundleSpecification(
195 _('compression engine %s is not supported on v1 bundles') %
194 _('compression engine %s is not supported on v1 bundles') %
196 compression)
195 compression)
197
196
198 # The specification for packed1 can optionally declare the data formats
197 # The specification for packed1 can optionally declare the data formats
199 # required to apply it. If we see this metadata, compare against what the
198 # required to apply it. If we see this metadata, compare against what the
200 # repo supports and error if the bundle isn't compatible.
199 # repo supports and error if the bundle isn't compatible.
201 if version == 'packed1' and 'requirements' in params:
200 if version == 'packed1' and 'requirements' in params:
202 requirements = set(params['requirements'].split(','))
201 requirements = set(params['requirements'].split(','))
203 missingreqs = requirements - repo.supportedformats
202 missingreqs = requirements - repo.supportedformats
204 if missingreqs:
203 if missingreqs:
205 raise error.UnsupportedBundleSpecification(
204 raise error.UnsupportedBundleSpecification(
206 _('missing support for repository features: %s') %
205 _('missing support for repository features: %s') %
207 ', '.join(sorted(missingreqs)))
206 ', '.join(sorted(missingreqs)))
208
207
209 # Compute contentopts based on the version
208 # Compute contentopts based on the version
210 contentopts = _bundlespeccontentopts.get(version, {}).copy()
209 contentopts = _bundlespeccontentopts.get(version, {}).copy()
211
210
212 # Process the variants
211 # Process the variants
213 if "stream" in params and params["stream"] == "v2":
212 if "stream" in params and params["stream"] == "v2":
214 variant = _bundlespecvariants["streamv2"]
213 variant = _bundlespecvariants["streamv2"]
215 contentopts.update(variant)
214 contentopts.update(variant)
216
215
217 engine = util.compengines.forbundlename(compression)
216 engine = util.compengines.forbundlename(compression)
218 compression, wirecompression = engine.bundletype()
217 compression, wirecompression = engine.bundletype()
219 wireversion = _bundlespeccgversions[version]
218 wireversion = _bundlespeccgversions[version]
220
219
221 return bundlespec(compression, wirecompression, version, wireversion,
220 return bundlespec(compression, wirecompression, version, wireversion,
222 params, contentopts)
221 params, contentopts)
223
222
224 def readbundle(ui, fh, fname, vfs=None):
223 def readbundle(ui, fh, fname, vfs=None):
225 header = changegroup.readexactly(fh, 4)
224 header = changegroup.readexactly(fh, 4)
226
225
227 alg = None
226 alg = None
228 if not fname:
227 if not fname:
229 fname = "stream"
228 fname = "stream"
230 if not header.startswith('HG') and header.startswith('\0'):
229 if not header.startswith('HG') and header.startswith('\0'):
231 fh = changegroup.headerlessfixup(fh, header)
230 fh = changegroup.headerlessfixup(fh, header)
232 header = "HG10"
231 header = "HG10"
233 alg = 'UN'
232 alg = 'UN'
234 elif vfs:
233 elif vfs:
235 fname = vfs.join(fname)
234 fname = vfs.join(fname)
236
235
237 magic, version = header[0:2], header[2:4]
236 magic, version = header[0:2], header[2:4]
238
237
239 if magic != 'HG':
238 if magic != 'HG':
240 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
239 raise error.Abort(_('%s: not a Mercurial bundle') % fname)
241 if version == '10':
240 if version == '10':
242 if alg is None:
241 if alg is None:
243 alg = changegroup.readexactly(fh, 2)
242 alg = changegroup.readexactly(fh, 2)
244 return changegroup.cg1unpacker(fh, alg)
243 return changegroup.cg1unpacker(fh, alg)
245 elif version.startswith('2'):
244 elif version.startswith('2'):
246 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
245 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
247 elif version == 'S1':
246 elif version == 'S1':
248 return streamclone.streamcloneapplier(fh)
247 return streamclone.streamcloneapplier(fh)
249 else:
248 else:
250 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
249 raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
251
250
252 def getbundlespec(ui, fh):
251 def getbundlespec(ui, fh):
253 """Infer the bundlespec from a bundle file handle.
252 """Infer the bundlespec from a bundle file handle.
254
253
255 The input file handle is seeked and the original seek position is not
254 The input file handle is seeked and the original seek position is not
256 restored.
255 restored.
257 """
256 """
258 def speccompression(alg):
257 def speccompression(alg):
259 try:
258 try:
260 return util.compengines.forbundletype(alg).bundletype()[0]
259 return util.compengines.forbundletype(alg).bundletype()[0]
261 except KeyError:
260 except KeyError:
262 return None
261 return None
263
262
264 b = readbundle(ui, fh, None)
263 b = readbundle(ui, fh, None)
265 if isinstance(b, changegroup.cg1unpacker):
264 if isinstance(b, changegroup.cg1unpacker):
266 alg = b._type
265 alg = b._type
267 if alg == '_truncatedBZ':
266 if alg == '_truncatedBZ':
268 alg = 'BZ'
267 alg = 'BZ'
269 comp = speccompression(alg)
268 comp = speccompression(alg)
270 if not comp:
269 if not comp:
271 raise error.Abort(_('unknown compression algorithm: %s') % alg)
270 raise error.Abort(_('unknown compression algorithm: %s') % alg)
272 return '%s-v1' % comp
271 return '%s-v1' % comp
273 elif isinstance(b, bundle2.unbundle20):
272 elif isinstance(b, bundle2.unbundle20):
274 if 'Compression' in b.params:
273 if 'Compression' in b.params:
275 comp = speccompression(b.params['Compression'])
274 comp = speccompression(b.params['Compression'])
276 if not comp:
275 if not comp:
277 raise error.Abort(_('unknown compression algorithm: %s') % comp)
276 raise error.Abort(_('unknown compression algorithm: %s') % comp)
278 else:
277 else:
279 comp = 'none'
278 comp = 'none'
280
279
281 version = None
280 version = None
282 for part in b.iterparts():
281 for part in b.iterparts():
283 if part.type == 'changegroup':
282 if part.type == 'changegroup':
284 version = part.params['version']
283 version = part.params['version']
285 if version in ('01', '02'):
284 if version in ('01', '02'):
286 version = 'v2'
285 version = 'v2'
287 else:
286 else:
288 raise error.Abort(_('changegroup version %s does not have '
287 raise error.Abort(_('changegroup version %s does not have '
289 'a known bundlespec') % version,
288 'a known bundlespec') % version,
290 hint=_('try upgrading your Mercurial '
289 hint=_('try upgrading your Mercurial '
291 'client'))
290 'client'))
292 elif part.type == 'stream2' and version is None:
291 elif part.type == 'stream2' and version is None:
293 # A stream2 part requires to be part of a v2 bundle
292 # A stream2 part requires to be part of a v2 bundle
294 version = "v2"
293 version = "v2"
295 requirements = urlreq.unquote(part.params['requirements'])
294 requirements = urlreq.unquote(part.params['requirements'])
296 splitted = requirements.split()
295 splitted = requirements.split()
297 params = bundle2._formatrequirementsparams(splitted)
296 params = bundle2._formatrequirementsparams(splitted)
298 return 'none-v2;stream=v2;%s' % params
297 return 'none-v2;stream=v2;%s' % params
299
298
300 if not version:
299 if not version:
301 raise error.Abort(_('could not identify changegroup version in '
300 raise error.Abort(_('could not identify changegroup version in '
302 'bundle'))
301 'bundle'))
303
302
304 return '%s-%s' % (comp, version)
303 return '%s-%s' % (comp, version)
305 elif isinstance(b, streamclone.streamcloneapplier):
304 elif isinstance(b, streamclone.streamcloneapplier):
306 requirements = streamclone.readbundle1header(fh)[2]
305 requirements = streamclone.readbundle1header(fh)[2]
307 formatted = bundle2._formatrequirementsparams(requirements)
306 formatted = bundle2._formatrequirementsparams(requirements)
308 return 'none-packed1;%s' % formatted
307 return 'none-packed1;%s' % formatted
309 else:
308 else:
310 raise error.Abort(_('unknown bundle type: %s') % b)
309 raise error.Abort(_('unknown bundle type: %s') % b)
311
310
312 def _computeoutgoing(repo, heads, common):
311 def _computeoutgoing(repo, heads, common):
313 """Computes which revs are outgoing given a set of common
312 """Computes which revs are outgoing given a set of common
314 and a set of heads.
313 and a set of heads.
315
314
316 This is a separate function so extensions can have access to
315 This is a separate function so extensions can have access to
317 the logic.
316 the logic.
318
317
319 Returns a discovery.outgoing object.
318 Returns a discovery.outgoing object.
320 """
319 """
321 cl = repo.changelog
320 cl = repo.changelog
322 if common:
321 if common:
323 hasnode = cl.hasnode
322 hasnode = cl.hasnode
324 common = [n for n in common if hasnode(n)]
323 common = [n for n in common if hasnode(n)]
325 else:
324 else:
326 common = [nullid]
325 common = [nullid]
327 if not heads:
326 if not heads:
328 heads = cl.heads()
327 heads = cl.heads()
329 return discovery.outgoing(repo, common, heads)
328 return discovery.outgoing(repo, common, heads)
330
329
331 def _forcebundle1(op):
330 def _forcebundle1(op):
332 """return true if a pull/push must use bundle1
331 """return true if a pull/push must use bundle1
333
332
334 This function is used to allow testing of the older bundle version"""
333 This function is used to allow testing of the older bundle version"""
335 ui = op.repo.ui
334 ui = op.repo.ui
336 # The goal is this config is to allow developer to choose the bundle
335 # The goal is this config is to allow developer to choose the bundle
337 # version used during exchanged. This is especially handy during test.
336 # version used during exchanged. This is especially handy during test.
338 # Value is a list of bundle version to be picked from, highest version
337 # Value is a list of bundle version to be picked from, highest version
339 # should be used.
338 # should be used.
340 #
339 #
341 # developer config: devel.legacy.exchange
340 # developer config: devel.legacy.exchange
342 exchange = ui.configlist('devel', 'legacy.exchange')
341 exchange = ui.configlist('devel', 'legacy.exchange')
343 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
342 forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
344 return forcebundle1 or not op.remote.capable('bundle2')
343 return forcebundle1 or not op.remote.capable('bundle2')
345
344
346 class pushoperation(object):
345 class pushoperation(object):
347 """A object that represent a single push operation
346 """A object that represent a single push operation
348
347
349 Its purpose is to carry push related state and very common operations.
348 Its purpose is to carry push related state and very common operations.
350
349
351 A new pushoperation should be created at the beginning of each push and
350 A new pushoperation should be created at the beginning of each push and
352 discarded afterward.
351 discarded afterward.
353 """
352 """
354
353
355 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
354 def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
356 bookmarks=(), pushvars=None):
355 bookmarks=(), pushvars=None):
357 # repo we push from
356 # repo we push from
358 self.repo = repo
357 self.repo = repo
359 self.ui = repo.ui
358 self.ui = repo.ui
360 # repo we push to
359 # repo we push to
361 self.remote = remote
360 self.remote = remote
362 # force option provided
361 # force option provided
363 self.force = force
362 self.force = force
364 # revs to be pushed (None is "all")
363 # revs to be pushed (None is "all")
365 self.revs = revs
364 self.revs = revs
366 # bookmark explicitly pushed
365 # bookmark explicitly pushed
367 self.bookmarks = bookmarks
366 self.bookmarks = bookmarks
368 # allow push of new branch
367 # allow push of new branch
369 self.newbranch = newbranch
368 self.newbranch = newbranch
370 # step already performed
369 # step already performed
371 # (used to check what steps have been already performed through bundle2)
370 # (used to check what steps have been already performed through bundle2)
372 self.stepsdone = set()
371 self.stepsdone = set()
373 # Integer version of the changegroup push result
372 # Integer version of the changegroup push result
374 # - None means nothing to push
373 # - None means nothing to push
375 # - 0 means HTTP error
374 # - 0 means HTTP error
376 # - 1 means we pushed and remote head count is unchanged *or*
375 # - 1 means we pushed and remote head count is unchanged *or*
377 # we have outgoing changesets but refused to push
376 # we have outgoing changesets but refused to push
378 # - other values as described by addchangegroup()
377 # - other values as described by addchangegroup()
379 self.cgresult = None
378 self.cgresult = None
380 # Boolean value for the bookmark push
379 # Boolean value for the bookmark push
381 self.bkresult = None
380 self.bkresult = None
382 # discover.outgoing object (contains common and outgoing data)
381 # discover.outgoing object (contains common and outgoing data)
383 self.outgoing = None
382 self.outgoing = None
384 # all remote topological heads before the push
383 # all remote topological heads before the push
385 self.remoteheads = None
384 self.remoteheads = None
386 # Details of the remote branch pre and post push
385 # Details of the remote branch pre and post push
387 #
386 #
388 # mapping: {'branch': ([remoteheads],
387 # mapping: {'branch': ([remoteheads],
389 # [newheads],
388 # [newheads],
390 # [unsyncedheads],
389 # [unsyncedheads],
391 # [discardedheads])}
390 # [discardedheads])}
392 # - branch: the branch name
391 # - branch: the branch name
393 # - remoteheads: the list of remote heads known locally
392 # - remoteheads: the list of remote heads known locally
394 # None if the branch is new
393 # None if the branch is new
395 # - newheads: the new remote heads (known locally) with outgoing pushed
394 # - newheads: the new remote heads (known locally) with outgoing pushed
396 # - unsyncedheads: the list of remote heads unknown locally.
395 # - unsyncedheads: the list of remote heads unknown locally.
397 # - discardedheads: the list of remote heads made obsolete by the push
396 # - discardedheads: the list of remote heads made obsolete by the push
398 self.pushbranchmap = None
397 self.pushbranchmap = None
399 # testable as a boolean indicating if any nodes are missing locally.
398 # testable as a boolean indicating if any nodes are missing locally.
400 self.incoming = None
399 self.incoming = None
401 # summary of the remote phase situation
400 # summary of the remote phase situation
402 self.remotephases = None
401 self.remotephases = None
403 # phases changes that must be pushed along side the changesets
402 # phases changes that must be pushed along side the changesets
404 self.outdatedphases = None
403 self.outdatedphases = None
405 # phases changes that must be pushed if changeset push fails
404 # phases changes that must be pushed if changeset push fails
406 self.fallbackoutdatedphases = None
405 self.fallbackoutdatedphases = None
407 # outgoing obsmarkers
406 # outgoing obsmarkers
408 self.outobsmarkers = set()
407 self.outobsmarkers = set()
409 # outgoing bookmarks
408 # outgoing bookmarks
410 self.outbookmarks = []
409 self.outbookmarks = []
411 # transaction manager
410 # transaction manager
412 self.trmanager = None
411 self.trmanager = None
413 # map { pushkey partid -> callback handling failure}
412 # map { pushkey partid -> callback handling failure}
414 # used to handle exception from mandatory pushkey part failure
413 # used to handle exception from mandatory pushkey part failure
415 self.pkfailcb = {}
414 self.pkfailcb = {}
416 # an iterable of pushvars or None
415 # an iterable of pushvars or None
417 self.pushvars = pushvars
416 self.pushvars = pushvars
418
417
419 @util.propertycache
418 @util.propertycache
420 def futureheads(self):
419 def futureheads(self):
421 """future remote heads if the changeset push succeeds"""
420 """future remote heads if the changeset push succeeds"""
422 return self.outgoing.missingheads
421 return self.outgoing.missingheads
423
422
424 @util.propertycache
423 @util.propertycache
425 def fallbackheads(self):
424 def fallbackheads(self):
426 """future remote heads if the changeset push fails"""
425 """future remote heads if the changeset push fails"""
427 if self.revs is None:
426 if self.revs is None:
428 # not target to push, all common are relevant
427 # not target to push, all common are relevant
429 return self.outgoing.commonheads
428 return self.outgoing.commonheads
430 unfi = self.repo.unfiltered()
429 unfi = self.repo.unfiltered()
431 # I want cheads = heads(::missingheads and ::commonheads)
430 # I want cheads = heads(::missingheads and ::commonheads)
432 # (missingheads is revs with secret changeset filtered out)
431 # (missingheads is revs with secret changeset filtered out)
433 #
432 #
434 # This can be expressed as:
433 # This can be expressed as:
435 # cheads = ( (missingheads and ::commonheads)
434 # cheads = ( (missingheads and ::commonheads)
436 # + (commonheads and ::missingheads))"
435 # + (commonheads and ::missingheads))"
437 # )
436 # )
438 #
437 #
439 # while trying to push we already computed the following:
438 # while trying to push we already computed the following:
440 # common = (::commonheads)
439 # common = (::commonheads)
441 # missing = ((commonheads::missingheads) - commonheads)
440 # missing = ((commonheads::missingheads) - commonheads)
442 #
441 #
443 # We can pick:
442 # We can pick:
444 # * missingheads part of common (::commonheads)
443 # * missingheads part of common (::commonheads)
445 common = self.outgoing.common
444 common = self.outgoing.common
446 nm = self.repo.changelog.nodemap
445 nm = self.repo.changelog.nodemap
447 cheads = [node for node in self.revs if nm[node] in common]
446 cheads = [node for node in self.revs if nm[node] in common]
448 # and
447 # and
449 # * commonheads parents on missing
448 # * commonheads parents on missing
450 revset = unfi.set('%ln and parents(roots(%ln))',
449 revset = unfi.set('%ln and parents(roots(%ln))',
451 self.outgoing.commonheads,
450 self.outgoing.commonheads,
452 self.outgoing.missing)
451 self.outgoing.missing)
453 cheads.extend(c.node() for c in revset)
452 cheads.extend(c.node() for c in revset)
454 return cheads
453 return cheads
455
454
456 @property
455 @property
457 def commonheads(self):
456 def commonheads(self):
458 """set of all common heads after changeset bundle push"""
457 """set of all common heads after changeset bundle push"""
459 if self.cgresult:
458 if self.cgresult:
460 return self.futureheads
459 return self.futureheads
461 else:
460 else:
462 return self.fallbackheads
461 return self.fallbackheads
463
462
464 # mapping of message used when pushing bookmark
463 # mapping of message used when pushing bookmark
465 bookmsgmap = {'update': (_("updating bookmark %s\n"),
464 bookmsgmap = {'update': (_("updating bookmark %s\n"),
466 _('updating bookmark %s failed!\n')),
465 _('updating bookmark %s failed!\n')),
467 'export': (_("exporting bookmark %s\n"),
466 'export': (_("exporting bookmark %s\n"),
468 _('exporting bookmark %s failed!\n')),
467 _('exporting bookmark %s failed!\n')),
469 'delete': (_("deleting remote bookmark %s\n"),
468 'delete': (_("deleting remote bookmark %s\n"),
470 _('deleting remote bookmark %s failed!\n')),
469 _('deleting remote bookmark %s failed!\n')),
471 }
470 }
472
471
473
472
474 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
473 def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
475 opargs=None):
474 opargs=None):
476 '''Push outgoing changesets (limited by revs) from a local
475 '''Push outgoing changesets (limited by revs) from a local
477 repository to remote. Return an integer:
476 repository to remote. Return an integer:
478 - None means nothing to push
477 - None means nothing to push
479 - 0 means HTTP error
478 - 0 means HTTP error
480 - 1 means we pushed and remote head count is unchanged *or*
479 - 1 means we pushed and remote head count is unchanged *or*
481 we have outgoing changesets but refused to push
480 we have outgoing changesets but refused to push
482 - other values as described by addchangegroup()
481 - other values as described by addchangegroup()
483 '''
482 '''
484 if opargs is None:
483 if opargs is None:
485 opargs = {}
484 opargs = {}
486 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
485 pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
487 **pycompat.strkwargs(opargs))
486 **pycompat.strkwargs(opargs))
488 if pushop.remote.local():
487 if pushop.remote.local():
489 missing = (set(pushop.repo.requirements)
488 missing = (set(pushop.repo.requirements)
490 - pushop.remote.local().supported)
489 - pushop.remote.local().supported)
491 if missing:
490 if missing:
492 msg = _("required features are not"
491 msg = _("required features are not"
493 " supported in the destination:"
492 " supported in the destination:"
494 " %s") % (', '.join(sorted(missing)))
493 " %s") % (', '.join(sorted(missing)))
495 raise error.Abort(msg)
494 raise error.Abort(msg)
496
495
497 if not pushop.remote.canpush():
496 if not pushop.remote.canpush():
498 raise error.Abort(_("destination does not support push"))
497 raise error.Abort(_("destination does not support push"))
499
498
500 if not pushop.remote.capable('unbundle'):
499 if not pushop.remote.capable('unbundle'):
501 raise error.Abort(_('cannot push: destination does not support the '
500 raise error.Abort(_('cannot push: destination does not support the '
502 'unbundle wire protocol command'))
501 'unbundle wire protocol command'))
503
502
504 # get lock as we might write phase data
503 # get lock as we might write phase data
505 wlock = lock = None
504 wlock = lock = None
506 try:
505 try:
507 # bundle2 push may receive a reply bundle touching bookmarks or other
506 # bundle2 push may receive a reply bundle touching bookmarks or other
508 # things requiring the wlock. Take it now to ensure proper ordering.
507 # things requiring the wlock. Take it now to ensure proper ordering.
509 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
508 maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
510 if (not _forcebundle1(pushop)) and maypushback:
509 if (not _forcebundle1(pushop)) and maypushback:
511 wlock = pushop.repo.wlock()
510 wlock = pushop.repo.wlock()
512 lock = pushop.repo.lock()
511 lock = pushop.repo.lock()
513 pushop.trmanager = transactionmanager(pushop.repo,
512 pushop.trmanager = transactionmanager(pushop.repo,
514 'push-response',
513 'push-response',
515 pushop.remote.url())
514 pushop.remote.url())
516 except IOError as err:
515 except error.LockUnavailable as err:
517 if err.errno != errno.EACCES:
518 raise
519 # source repo cannot be locked.
516 # source repo cannot be locked.
520 # We do not abort the push, but just disable the local phase
517 # We do not abort the push, but just disable the local phase
521 # synchronisation.
518 # synchronisation.
522 msg = 'cannot lock source repository: %s\n' % err
519 msg = 'cannot lock source repository: %s\n' % err
523 pushop.ui.debug(msg)
520 pushop.ui.debug(msg)
524
521
525 with wlock or util.nullcontextmanager(), \
522 with wlock or util.nullcontextmanager(), \
526 lock or util.nullcontextmanager(), \
523 lock or util.nullcontextmanager(), \
527 pushop.trmanager or util.nullcontextmanager():
524 pushop.trmanager or util.nullcontextmanager():
528 pushop.repo.checkpush(pushop)
525 pushop.repo.checkpush(pushop)
529 _pushdiscovery(pushop)
526 _pushdiscovery(pushop)
530 if not _forcebundle1(pushop):
527 if not _forcebundle1(pushop):
531 _pushbundle2(pushop)
528 _pushbundle2(pushop)
532 _pushchangeset(pushop)
529 _pushchangeset(pushop)
533 _pushsyncphase(pushop)
530 _pushsyncphase(pushop)
534 _pushobsolete(pushop)
531 _pushobsolete(pushop)
535 _pushbookmark(pushop)
532 _pushbookmark(pushop)
536
533
537 return pushop
534 return pushop
538
535
539 # list of steps to perform discovery before push
536 # list of steps to perform discovery before push
540 pushdiscoveryorder = []
537 pushdiscoveryorder = []
541
538
542 # Mapping between step name and function
539 # Mapping between step name and function
543 #
540 #
544 # This exists to help extensions wrap steps if necessary
541 # This exists to help extensions wrap steps if necessary
545 pushdiscoverymapping = {}
542 pushdiscoverymapping = {}
546
543
547 def pushdiscovery(stepname):
544 def pushdiscovery(stepname):
548 """decorator for function performing discovery before push
545 """decorator for function performing discovery before push
549
546
550 The function is added to the step -> function mapping and appended to the
547 The function is added to the step -> function mapping and appended to the
551 list of steps. Beware that decorated function will be added in order (this
548 list of steps. Beware that decorated function will be added in order (this
552 may matter).
549 may matter).
553
550
554 You can only use this decorator for a new step, if you want to wrap a step
551 You can only use this decorator for a new step, if you want to wrap a step
555 from an extension, change the pushdiscovery dictionary directly."""
552 from an extension, change the pushdiscovery dictionary directly."""
556 def dec(func):
553 def dec(func):
557 assert stepname not in pushdiscoverymapping
554 assert stepname not in pushdiscoverymapping
558 pushdiscoverymapping[stepname] = func
555 pushdiscoverymapping[stepname] = func
559 pushdiscoveryorder.append(stepname)
556 pushdiscoveryorder.append(stepname)
560 return func
557 return func
561 return dec
558 return dec
562
559
563 def _pushdiscovery(pushop):
560 def _pushdiscovery(pushop):
564 """Run all discovery steps"""
561 """Run all discovery steps"""
565 for stepname in pushdiscoveryorder:
562 for stepname in pushdiscoveryorder:
566 step = pushdiscoverymapping[stepname]
563 step = pushdiscoverymapping[stepname]
567 step(pushop)
564 step(pushop)
568
565
569 @pushdiscovery('changeset')
566 @pushdiscovery('changeset')
570 def _pushdiscoverychangeset(pushop):
567 def _pushdiscoverychangeset(pushop):
571 """discover the changeset that need to be pushed"""
568 """discover the changeset that need to be pushed"""
572 fci = discovery.findcommonincoming
569 fci = discovery.findcommonincoming
573 if pushop.revs:
570 if pushop.revs:
574 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
571 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
575 ancestorsof=pushop.revs)
572 ancestorsof=pushop.revs)
576 else:
573 else:
577 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
574 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
578 common, inc, remoteheads = commoninc
575 common, inc, remoteheads = commoninc
579 fco = discovery.findcommonoutgoing
576 fco = discovery.findcommonoutgoing
580 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
577 outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
581 commoninc=commoninc, force=pushop.force)
578 commoninc=commoninc, force=pushop.force)
582 pushop.outgoing = outgoing
579 pushop.outgoing = outgoing
583 pushop.remoteheads = remoteheads
580 pushop.remoteheads = remoteheads
584 pushop.incoming = inc
581 pushop.incoming = inc
585
582
586 @pushdiscovery('phase')
583 @pushdiscovery('phase')
587 def _pushdiscoveryphase(pushop):
584 def _pushdiscoveryphase(pushop):
588 """discover the phase that needs to be pushed
585 """discover the phase that needs to be pushed
589
586
590 (computed for both success and failure case for changesets push)"""
587 (computed for both success and failure case for changesets push)"""
591 outgoing = pushop.outgoing
588 outgoing = pushop.outgoing
592 unfi = pushop.repo.unfiltered()
589 unfi = pushop.repo.unfiltered()
593 remotephases = listkeys(pushop.remote, 'phases')
590 remotephases = listkeys(pushop.remote, 'phases')
594
591
595 if (pushop.ui.configbool('ui', '_usedassubrepo')
592 if (pushop.ui.configbool('ui', '_usedassubrepo')
596 and remotephases # server supports phases
593 and remotephases # server supports phases
597 and not pushop.outgoing.missing # no changesets to be pushed
594 and not pushop.outgoing.missing # no changesets to be pushed
598 and remotephases.get('publishing', False)):
595 and remotephases.get('publishing', False)):
599 # When:
596 # When:
600 # - this is a subrepo push
597 # - this is a subrepo push
601 # - and remote support phase
598 # - and remote support phase
602 # - and no changeset are to be pushed
599 # - and no changeset are to be pushed
603 # - and remote is publishing
600 # - and remote is publishing
604 # We may be in issue 3781 case!
601 # We may be in issue 3781 case!
605 # We drop the possible phase synchronisation done by
602 # We drop the possible phase synchronisation done by
606 # courtesy to publish changesets possibly locally draft
603 # courtesy to publish changesets possibly locally draft
607 # on the remote.
604 # on the remote.
608 pushop.outdatedphases = []
605 pushop.outdatedphases = []
609 pushop.fallbackoutdatedphases = []
606 pushop.fallbackoutdatedphases = []
610 return
607 return
611
608
612 pushop.remotephases = phases.remotephasessummary(pushop.repo,
609 pushop.remotephases = phases.remotephasessummary(pushop.repo,
613 pushop.fallbackheads,
610 pushop.fallbackheads,
614 remotephases)
611 remotephases)
615 droots = pushop.remotephases.draftroots
612 droots = pushop.remotephases.draftroots
616
613
617 extracond = ''
614 extracond = ''
618 if not pushop.remotephases.publishing:
615 if not pushop.remotephases.publishing:
619 extracond = ' and public()'
616 extracond = ' and public()'
620 revset = 'heads((%%ln::%%ln) %s)' % extracond
617 revset = 'heads((%%ln::%%ln) %s)' % extracond
621 # Get the list of all revs draft on remote by public here.
618 # Get the list of all revs draft on remote by public here.
622 # XXX Beware that revset break if droots is not strictly
619 # XXX Beware that revset break if droots is not strictly
623 # XXX root we may want to ensure it is but it is costly
620 # XXX root we may want to ensure it is but it is costly
624 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
621 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
625 if not outgoing.missing:
622 if not outgoing.missing:
626 future = fallback
623 future = fallback
627 else:
624 else:
628 # adds changeset we are going to push as draft
625 # adds changeset we are going to push as draft
629 #
626 #
630 # should not be necessary for publishing server, but because of an
627 # should not be necessary for publishing server, but because of an
631 # issue fixed in xxxxx we have to do it anyway.
628 # issue fixed in xxxxx we have to do it anyway.
632 fdroots = list(unfi.set('roots(%ln + %ln::)',
629 fdroots = list(unfi.set('roots(%ln + %ln::)',
633 outgoing.missing, droots))
630 outgoing.missing, droots))
634 fdroots = [f.node() for f in fdroots]
631 fdroots = [f.node() for f in fdroots]
635 future = list(unfi.set(revset, fdroots, pushop.futureheads))
632 future = list(unfi.set(revset, fdroots, pushop.futureheads))
636 pushop.outdatedphases = future
633 pushop.outdatedphases = future
637 pushop.fallbackoutdatedphases = fallback
634 pushop.fallbackoutdatedphases = fallback
638
635
639 @pushdiscovery('obsmarker')
636 @pushdiscovery('obsmarker')
640 def _pushdiscoveryobsmarkers(pushop):
637 def _pushdiscoveryobsmarkers(pushop):
641 if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
638 if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
642 return
639 return
643
640
644 if not pushop.repo.obsstore:
641 if not pushop.repo.obsstore:
645 return
642 return
646
643
647 if 'obsolete' not in listkeys(pushop.remote, 'namespaces'):
644 if 'obsolete' not in listkeys(pushop.remote, 'namespaces'):
648 return
645 return
649
646
650 repo = pushop.repo
647 repo = pushop.repo
651 # very naive computation, that can be quite expensive on big repo.
648 # very naive computation, that can be quite expensive on big repo.
652 # However: evolution is currently slow on them anyway.
649 # However: evolution is currently slow on them anyway.
653 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
650 nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
654 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
651 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
655
652
656 @pushdiscovery('bookmarks')
653 @pushdiscovery('bookmarks')
657 def _pushdiscoverybookmarks(pushop):
654 def _pushdiscoverybookmarks(pushop):
658 ui = pushop.ui
655 ui = pushop.ui
659 repo = pushop.repo.unfiltered()
656 repo = pushop.repo.unfiltered()
660 remote = pushop.remote
657 remote = pushop.remote
661 ui.debug("checking for updated bookmarks\n")
658 ui.debug("checking for updated bookmarks\n")
662 ancestors = ()
659 ancestors = ()
663 if pushop.revs:
660 if pushop.revs:
664 revnums = map(repo.changelog.rev, pushop.revs)
661 revnums = map(repo.changelog.rev, pushop.revs)
665 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
662 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
666
663
667 remotebookmark = listkeys(remote, 'bookmarks')
664 remotebookmark = listkeys(remote, 'bookmarks')
668
665
669 explicit = set([repo._bookmarks.expandname(bookmark)
666 explicit = set([repo._bookmarks.expandname(bookmark)
670 for bookmark in pushop.bookmarks])
667 for bookmark in pushop.bookmarks])
671
668
672 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
669 remotebookmark = bookmod.unhexlifybookmarks(remotebookmark)
673 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
670 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
674
671
675 def safehex(x):
672 def safehex(x):
676 if x is None:
673 if x is None:
677 return x
674 return x
678 return hex(x)
675 return hex(x)
679
676
680 def hexifycompbookmarks(bookmarks):
677 def hexifycompbookmarks(bookmarks):
681 return [(b, safehex(scid), safehex(dcid))
678 return [(b, safehex(scid), safehex(dcid))
682 for (b, scid, dcid) in bookmarks]
679 for (b, scid, dcid) in bookmarks]
683
680
684 comp = [hexifycompbookmarks(marks) for marks in comp]
681 comp = [hexifycompbookmarks(marks) for marks in comp]
685 return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
682 return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
686
683
687 def _processcompared(pushop, pushed, explicit, remotebms, comp):
684 def _processcompared(pushop, pushed, explicit, remotebms, comp):
688 """take decision on bookmark to pull from the remote bookmark
685 """take decision on bookmark to pull from the remote bookmark
689
686
690 Exist to help extensions who want to alter this behavior.
687 Exist to help extensions who want to alter this behavior.
691 """
688 """
692 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
689 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
693
690
694 repo = pushop.repo
691 repo = pushop.repo
695
692
696 for b, scid, dcid in advsrc:
693 for b, scid, dcid in advsrc:
697 if b in explicit:
694 if b in explicit:
698 explicit.remove(b)
695 explicit.remove(b)
699 if not pushed or repo[scid].rev() in pushed:
696 if not pushed or repo[scid].rev() in pushed:
700 pushop.outbookmarks.append((b, dcid, scid))
697 pushop.outbookmarks.append((b, dcid, scid))
701 # search added bookmark
698 # search added bookmark
702 for b, scid, dcid in addsrc:
699 for b, scid, dcid in addsrc:
703 if b in explicit:
700 if b in explicit:
704 explicit.remove(b)
701 explicit.remove(b)
705 pushop.outbookmarks.append((b, '', scid))
702 pushop.outbookmarks.append((b, '', scid))
706 # search for overwritten bookmark
703 # search for overwritten bookmark
707 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
704 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
708 if b in explicit:
705 if b in explicit:
709 explicit.remove(b)
706 explicit.remove(b)
710 pushop.outbookmarks.append((b, dcid, scid))
707 pushop.outbookmarks.append((b, dcid, scid))
711 # search for bookmark to delete
708 # search for bookmark to delete
712 for b, scid, dcid in adddst:
709 for b, scid, dcid in adddst:
713 if b in explicit:
710 if b in explicit:
714 explicit.remove(b)
711 explicit.remove(b)
715 # treat as "deleted locally"
712 # treat as "deleted locally"
716 pushop.outbookmarks.append((b, dcid, ''))
713 pushop.outbookmarks.append((b, dcid, ''))
717 # identical bookmarks shouldn't get reported
714 # identical bookmarks shouldn't get reported
718 for b, scid, dcid in same:
715 for b, scid, dcid in same:
719 if b in explicit:
716 if b in explicit:
720 explicit.remove(b)
717 explicit.remove(b)
721
718
722 if explicit:
719 if explicit:
723 explicit = sorted(explicit)
720 explicit = sorted(explicit)
724 # we should probably list all of them
721 # we should probably list all of them
725 pushop.ui.warn(_('bookmark %s does not exist on the local '
722 pushop.ui.warn(_('bookmark %s does not exist on the local '
726 'or remote repository!\n') % explicit[0])
723 'or remote repository!\n') % explicit[0])
727 pushop.bkresult = 2
724 pushop.bkresult = 2
728
725
729 pushop.outbookmarks.sort()
726 pushop.outbookmarks.sort()
730
727
731 def _pushcheckoutgoing(pushop):
728 def _pushcheckoutgoing(pushop):
732 outgoing = pushop.outgoing
729 outgoing = pushop.outgoing
733 unfi = pushop.repo.unfiltered()
730 unfi = pushop.repo.unfiltered()
734 if not outgoing.missing:
731 if not outgoing.missing:
735 # nothing to push
732 # nothing to push
736 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
733 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
737 return False
734 return False
738 # something to push
735 # something to push
739 if not pushop.force:
736 if not pushop.force:
740 # if repo.obsstore == False --> no obsolete
737 # if repo.obsstore == False --> no obsolete
741 # then, save the iteration
738 # then, save the iteration
742 if unfi.obsstore:
739 if unfi.obsstore:
743 # this message are here for 80 char limit reason
740 # this message are here for 80 char limit reason
744 mso = _("push includes obsolete changeset: %s!")
741 mso = _("push includes obsolete changeset: %s!")
745 mspd = _("push includes phase-divergent changeset: %s!")
742 mspd = _("push includes phase-divergent changeset: %s!")
746 mscd = _("push includes content-divergent changeset: %s!")
743 mscd = _("push includes content-divergent changeset: %s!")
747 mst = {"orphan": _("push includes orphan changeset: %s!"),
744 mst = {"orphan": _("push includes orphan changeset: %s!"),
748 "phase-divergent": mspd,
745 "phase-divergent": mspd,
749 "content-divergent": mscd}
746 "content-divergent": mscd}
750 # If we are to push if there is at least one
747 # If we are to push if there is at least one
751 # obsolete or unstable changeset in missing, at
748 # obsolete or unstable changeset in missing, at
752 # least one of the missinghead will be obsolete or
749 # least one of the missinghead will be obsolete or
753 # unstable. So checking heads only is ok
750 # unstable. So checking heads only is ok
754 for node in outgoing.missingheads:
751 for node in outgoing.missingheads:
755 ctx = unfi[node]
752 ctx = unfi[node]
756 if ctx.obsolete():
753 if ctx.obsolete():
757 raise error.Abort(mso % ctx)
754 raise error.Abort(mso % ctx)
758 elif ctx.isunstable():
755 elif ctx.isunstable():
759 # TODO print more than one instability in the abort
756 # TODO print more than one instability in the abort
760 # message
757 # message
761 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
758 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
762
759
763 discovery.checkheads(pushop)
760 discovery.checkheads(pushop)
764 return True
761 return True
765
762
766 # List of names of steps to perform for an outgoing bundle2, order matters.
763 # List of names of steps to perform for an outgoing bundle2, order matters.
767 b2partsgenorder = []
764 b2partsgenorder = []
768
765
769 # Mapping between step name and function
766 # Mapping between step name and function
770 #
767 #
771 # This exists to help extensions wrap steps if necessary
768 # This exists to help extensions wrap steps if necessary
772 b2partsgenmapping = {}
769 b2partsgenmapping = {}
773
770
774 def b2partsgenerator(stepname, idx=None):
771 def b2partsgenerator(stepname, idx=None):
775 """decorator for function generating bundle2 part
772 """decorator for function generating bundle2 part
776
773
777 The function is added to the step -> function mapping and appended to the
774 The function is added to the step -> function mapping and appended to the
778 list of steps. Beware that decorated functions will be added in order
775 list of steps. Beware that decorated functions will be added in order
779 (this may matter).
776 (this may matter).
780
777
781 You can only use this decorator for new steps, if you want to wrap a step
778 You can only use this decorator for new steps, if you want to wrap a step
782 from an extension, attack the b2partsgenmapping dictionary directly."""
779 from an extension, attack the b2partsgenmapping dictionary directly."""
783 def dec(func):
780 def dec(func):
784 assert stepname not in b2partsgenmapping
781 assert stepname not in b2partsgenmapping
785 b2partsgenmapping[stepname] = func
782 b2partsgenmapping[stepname] = func
786 if idx is None:
783 if idx is None:
787 b2partsgenorder.append(stepname)
784 b2partsgenorder.append(stepname)
788 else:
785 else:
789 b2partsgenorder.insert(idx, stepname)
786 b2partsgenorder.insert(idx, stepname)
790 return func
787 return func
791 return dec
788 return dec
792
789
793 def _pushb2ctxcheckheads(pushop, bundler):
790 def _pushb2ctxcheckheads(pushop, bundler):
794 """Generate race condition checking parts
791 """Generate race condition checking parts
795
792
796 Exists as an independent function to aid extensions
793 Exists as an independent function to aid extensions
797 """
794 """
798 # * 'force' do not check for push race,
795 # * 'force' do not check for push race,
799 # * if we don't push anything, there are nothing to check.
796 # * if we don't push anything, there are nothing to check.
800 if not pushop.force and pushop.outgoing.missingheads:
797 if not pushop.force and pushop.outgoing.missingheads:
801 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
798 allowunrelated = 'related' in bundler.capabilities.get('checkheads', ())
802 emptyremote = pushop.pushbranchmap is None
799 emptyremote = pushop.pushbranchmap is None
803 if not allowunrelated or emptyremote:
800 if not allowunrelated or emptyremote:
804 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
801 bundler.newpart('check:heads', data=iter(pushop.remoteheads))
805 else:
802 else:
806 affected = set()
803 affected = set()
807 for branch, heads in pushop.pushbranchmap.iteritems():
804 for branch, heads in pushop.pushbranchmap.iteritems():
808 remoteheads, newheads, unsyncedheads, discardedheads = heads
805 remoteheads, newheads, unsyncedheads, discardedheads = heads
809 if remoteheads is not None:
806 if remoteheads is not None:
810 remote = set(remoteheads)
807 remote = set(remoteheads)
811 affected |= set(discardedheads) & remote
808 affected |= set(discardedheads) & remote
812 affected |= remote - set(newheads)
809 affected |= remote - set(newheads)
813 if affected:
810 if affected:
814 data = iter(sorted(affected))
811 data = iter(sorted(affected))
815 bundler.newpart('check:updated-heads', data=data)
812 bundler.newpart('check:updated-heads', data=data)
816
813
817 def _pushing(pushop):
814 def _pushing(pushop):
818 """return True if we are pushing anything"""
815 """return True if we are pushing anything"""
819 return bool(pushop.outgoing.missing
816 return bool(pushop.outgoing.missing
820 or pushop.outdatedphases
817 or pushop.outdatedphases
821 or pushop.outobsmarkers
818 or pushop.outobsmarkers
822 or pushop.outbookmarks)
819 or pushop.outbookmarks)
823
820
824 @b2partsgenerator('check-bookmarks')
821 @b2partsgenerator('check-bookmarks')
825 def _pushb2checkbookmarks(pushop, bundler):
822 def _pushb2checkbookmarks(pushop, bundler):
826 """insert bookmark move checking"""
823 """insert bookmark move checking"""
827 if not _pushing(pushop) or pushop.force:
824 if not _pushing(pushop) or pushop.force:
828 return
825 return
829 b2caps = bundle2.bundle2caps(pushop.remote)
826 b2caps = bundle2.bundle2caps(pushop.remote)
830 hasbookmarkcheck = 'bookmarks' in b2caps
827 hasbookmarkcheck = 'bookmarks' in b2caps
831 if not (pushop.outbookmarks and hasbookmarkcheck):
828 if not (pushop.outbookmarks and hasbookmarkcheck):
832 return
829 return
833 data = []
830 data = []
834 for book, old, new in pushop.outbookmarks:
831 for book, old, new in pushop.outbookmarks:
835 old = bin(old)
832 old = bin(old)
836 data.append((book, old))
833 data.append((book, old))
837 checkdata = bookmod.binaryencode(data)
834 checkdata = bookmod.binaryencode(data)
838 bundler.newpart('check:bookmarks', data=checkdata)
835 bundler.newpart('check:bookmarks', data=checkdata)
839
836
840 @b2partsgenerator('check-phases')
837 @b2partsgenerator('check-phases')
841 def _pushb2checkphases(pushop, bundler):
838 def _pushb2checkphases(pushop, bundler):
842 """insert phase move checking"""
839 """insert phase move checking"""
843 if not _pushing(pushop) or pushop.force:
840 if not _pushing(pushop) or pushop.force:
844 return
841 return
845 b2caps = bundle2.bundle2caps(pushop.remote)
842 b2caps = bundle2.bundle2caps(pushop.remote)
846 hasphaseheads = 'heads' in b2caps.get('phases', ())
843 hasphaseheads = 'heads' in b2caps.get('phases', ())
847 if pushop.remotephases is not None and hasphaseheads:
844 if pushop.remotephases is not None and hasphaseheads:
848 # check that the remote phase has not changed
845 # check that the remote phase has not changed
849 checks = [[] for p in phases.allphases]
846 checks = [[] for p in phases.allphases]
850 checks[phases.public].extend(pushop.remotephases.publicheads)
847 checks[phases.public].extend(pushop.remotephases.publicheads)
851 checks[phases.draft].extend(pushop.remotephases.draftroots)
848 checks[phases.draft].extend(pushop.remotephases.draftroots)
852 if any(checks):
849 if any(checks):
853 for nodes in checks:
850 for nodes in checks:
854 nodes.sort()
851 nodes.sort()
855 checkdata = phases.binaryencode(checks)
852 checkdata = phases.binaryencode(checks)
856 bundler.newpart('check:phases', data=checkdata)
853 bundler.newpart('check:phases', data=checkdata)
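# Illustrative note (not part of the original file): 'checks' is a list
# indexed by phase number, e.g. roughly:
#
#   checks = [[public_head, ...],   # phases.public
#             [draft_root, ...],    # phases.draft
#             []]                   # remaining phases stay empty
#
# phases.binaryencode() serialises that structure for the 'check:phases'
# part, letting the server detect a concurrent phase move.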
857
854
858 @b2partsgenerator('changeset')
855 @b2partsgenerator('changeset')
859 def _pushb2ctx(pushop, bundler):
856 def _pushb2ctx(pushop, bundler):
860 """handle changegroup push through bundle2
857 """handle changegroup push through bundle2
861
858
862 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
859 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
863 """
860 """
864 if 'changesets' in pushop.stepsdone:
861 if 'changesets' in pushop.stepsdone:
865 return
862 return
866 pushop.stepsdone.add('changesets')
863 pushop.stepsdone.add('changesets')
867 # Send known heads to the server for race detection.
864 # Send known heads to the server for race detection.
868 if not _pushcheckoutgoing(pushop):
865 if not _pushcheckoutgoing(pushop):
869 return
866 return
870 pushop.repo.prepushoutgoinghooks(pushop)
867 pushop.repo.prepushoutgoinghooks(pushop)
871
868
872 _pushb2ctxcheckheads(pushop, bundler)
869 _pushb2ctxcheckheads(pushop, bundler)
873
870
874 b2caps = bundle2.bundle2caps(pushop.remote)
871 b2caps = bundle2.bundle2caps(pushop.remote)
875 version = '01'
872 version = '01'
876 cgversions = b2caps.get('changegroup')
873 cgversions = b2caps.get('changegroup')
877 if cgversions: # 3.1 and 3.2 ship with an empty value
874 if cgversions: # 3.1 and 3.2 ship with an empty value
878 cgversions = [v for v in cgversions
875 cgversions = [v for v in cgversions
879 if v in changegroup.supportedoutgoingversions(
876 if v in changegroup.supportedoutgoingversions(
880 pushop.repo)]
877 pushop.repo)]
881 if not cgversions:
878 if not cgversions:
882 raise ValueError(_('no common changegroup version'))
879 raise ValueError(_('no common changegroup version'))
883 version = max(cgversions)
880 version = max(cgversions)
884 cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
881 cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
885 'push')
882 'push')
886 cgpart = bundler.newpart('changegroup', data=cgstream)
883 cgpart = bundler.newpart('changegroup', data=cgstream)
887 if cgversions:
884 if cgversions:
888 cgpart.addparam('version', version)
885 cgpart.addparam('version', version)
889 if 'treemanifest' in pushop.repo.requirements:
886 if 'treemanifest' in pushop.repo.requirements:
890 cgpart.addparam('treemanifest', '1')
887 cgpart.addparam('treemanifest', '1')
891 def handlereply(op):
888 def handlereply(op):
892 """extract addchangegroup returns from server reply"""
889 """extract addchangegroup returns from server reply"""
893 cgreplies = op.records.getreplies(cgpart.id)
890 cgreplies = op.records.getreplies(cgpart.id)
894 assert len(cgreplies['changegroup']) == 1
891 assert len(cgreplies['changegroup']) == 1
895 pushop.cgresult = cgreplies['changegroup'][0]['return']
892 pushop.cgresult = cgreplies['changegroup'][0]['return']
896 return handlereply
893 return handlereply
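# Illustrative note (not part of the original file): the version negotiation
# above boils down to intersecting what the server advertises with what this
# client can emit and taking the highest, e.g.:
#
#   supported = changegroup.supportedoutgoingversions(pushop.repo)
#   cgversions = [v for v in b2caps.get('changegroup', ()) if v in supported]
#   version = max(cgversions) if cgversions else '01'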
897
894
898 @b2partsgenerator('phase')
895 @b2partsgenerator('phase')
899 def _pushb2phases(pushop, bundler):
896 def _pushb2phases(pushop, bundler):
900 """handle phase push through bundle2"""
897 """handle phase push through bundle2"""
901 if 'phases' in pushop.stepsdone:
898 if 'phases' in pushop.stepsdone:
902 return
899 return
903 b2caps = bundle2.bundle2caps(pushop.remote)
900 b2caps = bundle2.bundle2caps(pushop.remote)
904 ui = pushop.repo.ui
901 ui = pushop.repo.ui
905
902
906 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
903 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
907 haspushkey = 'pushkey' in b2caps
904 haspushkey = 'pushkey' in b2caps
908 hasphaseheads = 'heads' in b2caps.get('phases', ())
905 hasphaseheads = 'heads' in b2caps.get('phases', ())
909
906
910 if hasphaseheads and not legacyphase:
907 if hasphaseheads and not legacyphase:
911 return _pushb2phaseheads(pushop, bundler)
908 return _pushb2phaseheads(pushop, bundler)
912 elif haspushkey:
909 elif haspushkey:
913 return _pushb2phasespushkey(pushop, bundler)
910 return _pushb2phasespushkey(pushop, bundler)
914
911
915 def _pushb2phaseheads(pushop, bundler):
912 def _pushb2phaseheads(pushop, bundler):
916 """push phase information through a bundle2 - binary part"""
913 """push phase information through a bundle2 - binary part"""
917 pushop.stepsdone.add('phases')
914 pushop.stepsdone.add('phases')
918 if pushop.outdatedphases:
915 if pushop.outdatedphases:
919 updates = [[] for p in phases.allphases]
916 updates = [[] for p in phases.allphases]
920 updates[0].extend(h.node() for h in pushop.outdatedphases)
917 updates[0].extend(h.node() for h in pushop.outdatedphases)
921 phasedata = phases.binaryencode(updates)
918 phasedata = phases.binaryencode(updates)
922 bundler.newpart('phase-heads', data=phasedata)
919 bundler.newpart('phase-heads', data=phasedata)
923
920
924 def _pushb2phasespushkey(pushop, bundler):
921 def _pushb2phasespushkey(pushop, bundler):
925 """push phase information through a bundle2 - pushkey part"""
922 """push phase information through a bundle2 - pushkey part"""
926 pushop.stepsdone.add('phases')
923 pushop.stepsdone.add('phases')
927 part2node = []
924 part2node = []
928
925
929 def handlefailure(pushop, exc):
926 def handlefailure(pushop, exc):
930 targetid = int(exc.partid)
927 targetid = int(exc.partid)
931 for partid, node in part2node:
928 for partid, node in part2node:
932 if partid == targetid:
929 if partid == targetid:
933 raise error.Abort(_('updating %s to public failed') % node)
930 raise error.Abort(_('updating %s to public failed') % node)
934
931
935 enc = pushkey.encode
932 enc = pushkey.encode
936 for newremotehead in pushop.outdatedphases:
933 for newremotehead in pushop.outdatedphases:
937 part = bundler.newpart('pushkey')
934 part = bundler.newpart('pushkey')
938 part.addparam('namespace', enc('phases'))
935 part.addparam('namespace', enc('phases'))
939 part.addparam('key', enc(newremotehead.hex()))
936 part.addparam('key', enc(newremotehead.hex()))
940 part.addparam('old', enc('%d' % phases.draft))
937 part.addparam('old', enc('%d' % phases.draft))
941 part.addparam('new', enc('%d' % phases.public))
938 part.addparam('new', enc('%d' % phases.public))
942 part2node.append((part.id, newremotehead))
939 part2node.append((part.id, newremotehead))
943 pushop.pkfailcb[part.id] = handlefailure
940 pushop.pkfailcb[part.id] = handlefailure
944
941
945 def handlereply(op):
942 def handlereply(op):
946 for partid, node in part2node:
943 for partid, node in part2node:
947 partrep = op.records.getreplies(partid)
944 partrep = op.records.getreplies(partid)
948 results = partrep['pushkey']
945 results = partrep['pushkey']
949 assert len(results) <= 1
946 assert len(results) <= 1
950 msg = None
947 msg = None
951 if not results:
948 if not results:
952 msg = _('server ignored update of %s to public!\n') % node
949 msg = _('server ignored update of %s to public!\n') % node
953 elif not int(results[0]['return']):
950 elif not int(results[0]['return']):
954 msg = _('updating %s to public failed!\n') % node
951 msg = _('updating %s to public failed!\n') % node
955 if msg is not None:
952 if msg is not None:
956 pushop.ui.warn(msg)
953 pushop.ui.warn(msg)
957 return handlereply
954 return handlereply
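# Illustrative note (not part of the original file): each pushkey part built
# above asks the server to move one new remote head from draft to public,
# roughly the bundle2 equivalent of:
#
#   remote.pushkey('phases', newremotehead.hex(),
#                  '%d' % phases.draft, '%d' % phases.public)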
958
955
959 @b2partsgenerator('obsmarkers')
956 @b2partsgenerator('obsmarkers')
960 def _pushb2obsmarkers(pushop, bundler):
957 def _pushb2obsmarkers(pushop, bundler):
961 if 'obsmarkers' in pushop.stepsdone:
958 if 'obsmarkers' in pushop.stepsdone:
962 return
959 return
963 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
960 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
964 if obsolete.commonversion(remoteversions) is None:
961 if obsolete.commonversion(remoteversions) is None:
965 return
962 return
966 pushop.stepsdone.add('obsmarkers')
963 pushop.stepsdone.add('obsmarkers')
967 if pushop.outobsmarkers:
964 if pushop.outobsmarkers:
968 markers = sorted(pushop.outobsmarkers)
965 markers = sorted(pushop.outobsmarkers)
969 bundle2.buildobsmarkerspart(bundler, markers)
966 bundle2.buildobsmarkerspart(bundler, markers)
970
967
971 @b2partsgenerator('bookmarks')
968 @b2partsgenerator('bookmarks')
972 def _pushb2bookmarks(pushop, bundler):
969 def _pushb2bookmarks(pushop, bundler):
973 """handle bookmark push through bundle2"""
970 """handle bookmark push through bundle2"""
974 if 'bookmarks' in pushop.stepsdone:
971 if 'bookmarks' in pushop.stepsdone:
975 return
972 return
976 b2caps = bundle2.bundle2caps(pushop.remote)
973 b2caps = bundle2.bundle2caps(pushop.remote)
977
974
978 legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
975 legacy = pushop.repo.ui.configlist('devel', 'legacy.exchange')
979 legacybooks = 'bookmarks' in legacy
976 legacybooks = 'bookmarks' in legacy
980
977
981 if not legacybooks and 'bookmarks' in b2caps:
978 if not legacybooks and 'bookmarks' in b2caps:
982 return _pushb2bookmarkspart(pushop, bundler)
979 return _pushb2bookmarkspart(pushop, bundler)
983 elif 'pushkey' in b2caps:
980 elif 'pushkey' in b2caps:
984 return _pushb2bookmarkspushkey(pushop, bundler)
981 return _pushb2bookmarkspushkey(pushop, bundler)
985
982
986 def _bmaction(old, new):
983 def _bmaction(old, new):
987 """small utility for bookmark pushing"""
984 """small utility for bookmark pushing"""
988 if not old:
985 if not old:
989 return 'export'
986 return 'export'
990 elif not new:
987 elif not new:
991 return 'delete'
988 return 'delete'
992 return 'update'
989 return 'update'
993
990
994 def _pushb2bookmarkspart(pushop, bundler):
991 def _pushb2bookmarkspart(pushop, bundler):
995 pushop.stepsdone.add('bookmarks')
992 pushop.stepsdone.add('bookmarks')
996 if not pushop.outbookmarks:
993 if not pushop.outbookmarks:
997 return
994 return
998
995
999 allactions = []
996 allactions = []
1000 data = []
997 data = []
1001 for book, old, new in pushop.outbookmarks:
998 for book, old, new in pushop.outbookmarks:
1002 new = bin(new)
999 new = bin(new)
1003 data.append((book, new))
1000 data.append((book, new))
1004 allactions.append((book, _bmaction(old, new)))
1001 allactions.append((book, _bmaction(old, new)))
1005 checkdata = bookmod.binaryencode(data)
1002 checkdata = bookmod.binaryencode(data)
1006 bundler.newpart('bookmarks', data=checkdata)
1003 bundler.newpart('bookmarks', data=checkdata)
1007
1004
1008 def handlereply(op):
1005 def handlereply(op):
1009 ui = pushop.ui
1006 ui = pushop.ui
1010 # if success
1007 # if success
1011 for book, action in allactions:
1008 for book, action in allactions:
1012 ui.status(bookmsgmap[action][0] % book)
1009 ui.status(bookmsgmap[action][0] % book)
1013
1010
1014 return handlereply
1011 return handlereply
1015
1012
1016 def _pushb2bookmarkspushkey(pushop, bundler):
1013 def _pushb2bookmarkspushkey(pushop, bundler):
1017 pushop.stepsdone.add('bookmarks')
1014 pushop.stepsdone.add('bookmarks')
1018 part2book = []
1015 part2book = []
1019 enc = pushkey.encode
1016 enc = pushkey.encode
1020
1017
1021 def handlefailure(pushop, exc):
1018 def handlefailure(pushop, exc):
1022 targetid = int(exc.partid)
1019 targetid = int(exc.partid)
1023 for partid, book, action in part2book:
1020 for partid, book, action in part2book:
1024 if partid == targetid:
1021 if partid == targetid:
1025 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1022 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1026 # we should not be called for a part we did not generate
1027 assert False
1024 assert False
1028
1025
1029 for book, old, new in pushop.outbookmarks:
1026 for book, old, new in pushop.outbookmarks:
1030 part = bundler.newpart('pushkey')
1027 part = bundler.newpart('pushkey')
1031 part.addparam('namespace', enc('bookmarks'))
1028 part.addparam('namespace', enc('bookmarks'))
1032 part.addparam('key', enc(book))
1029 part.addparam('key', enc(book))
1033 part.addparam('old', enc(old))
1030 part.addparam('old', enc(old))
1034 part.addparam('new', enc(new))
1031 part.addparam('new', enc(new))
1035 action = 'update'
1032 action = 'update'
1036 if not old:
1033 if not old:
1037 action = 'export'
1034 action = 'export'
1038 elif not new:
1035 elif not new:
1039 action = 'delete'
1036 action = 'delete'
1040 part2book.append((part.id, book, action))
1037 part2book.append((part.id, book, action))
1041 pushop.pkfailcb[part.id] = handlefailure
1038 pushop.pkfailcb[part.id] = handlefailure
1042
1039
1043 def handlereply(op):
1040 def handlereply(op):
1044 ui = pushop.ui
1041 ui = pushop.ui
1045 for partid, book, action in part2book:
1042 for partid, book, action in part2book:
1046 partrep = op.records.getreplies(partid)
1043 partrep = op.records.getreplies(partid)
1047 results = partrep['pushkey']
1044 results = partrep['pushkey']
1048 assert len(results) <= 1
1045 assert len(results) <= 1
1049 if not results:
1046 if not results:
1050 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
1047 pushop.ui.warn(_('server ignored bookmark %s update\n') % book)
1051 else:
1048 else:
1052 ret = int(results[0]['return'])
1049 ret = int(results[0]['return'])
1053 if ret:
1050 if ret:
1054 ui.status(bookmsgmap[action][0] % book)
1051 ui.status(bookmsgmap[action][0] % book)
1055 else:
1052 else:
1056 ui.warn(bookmsgmap[action][1] % book)
1053 ui.warn(bookmsgmap[action][1] % book)
1057 if pushop.bkresult is not None:
1054 if pushop.bkresult is not None:
1058 pushop.bkresult = 1
1055 pushop.bkresult = 1
1059 return handlereply
1056 return handlereply
1060
1057
1061 @b2partsgenerator('pushvars', idx=0)
1058 @b2partsgenerator('pushvars', idx=0)
1062 def _getbundlesendvars(pushop, bundler):
1059 def _getbundlesendvars(pushop, bundler):
1063 '''send shellvars via bundle2'''
1060 '''send shellvars via bundle2'''
1064 pushvars = pushop.pushvars
1061 pushvars = pushop.pushvars
1065 if pushvars:
1062 if pushvars:
1066 shellvars = {}
1063 shellvars = {}
1067 for raw in pushvars:
1064 for raw in pushvars:
1068 if '=' not in raw:
1065 if '=' not in raw:
1069 msg = ("unable to parse variable '%s', should follow "
1066 msg = ("unable to parse variable '%s', should follow "
1070 "'KEY=VALUE' or 'KEY=' format")
1067 "'KEY=VALUE' or 'KEY=' format")
1071 raise error.Abort(msg % raw)
1068 raise error.Abort(msg % raw)
1072 k, v = raw.split('=', 1)
1069 k, v = raw.split('=', 1)
1073 shellvars[k] = v
1070 shellvars[k] = v
1074
1071
1075 part = bundler.newpart('pushvars')
1072 part = bundler.newpart('pushvars')
1076
1073
1077 for key, value in shellvars.iteritems():
1074 for key, value in shellvars.iteritems():
1078 part.addparam(key, value, mandatory=False)
1075 part.addparam(key, value, mandatory=False)
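# Illustrative example (not part of the original file): with
# `hg push --pushvars "DEBUG=1"` the loop above produces
# shellvars = {'DEBUG': '1'}, which the part carries as advisory parameters;
# server-side hooks typically see it as the HG_USERVAR_DEBUG environment
# variable.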
1079
1076
1080 def _pushbundle2(pushop):
1077 def _pushbundle2(pushop):
1081 """push data to the remote using bundle2
1078 """push data to the remote using bundle2
1082
1079
1083 The only currently supported type of data is changegroup but this will
1080 The only currently supported type of data is changegroup but this will
1084 evolve in the future."""
1081 evolve in the future."""
1085 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1082 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1086 pushback = (pushop.trmanager
1083 pushback = (pushop.trmanager
1087 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
1084 and pushop.ui.configbool('experimental', 'bundle2.pushback'))
1088
1085
1089 # create reply capability
1086 # create reply capability
1090 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
1087 capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
1091 allowpushback=pushback,
1088 allowpushback=pushback,
1092 role='client'))
1089 role='client'))
1093 bundler.newpart('replycaps', data=capsblob)
1090 bundler.newpart('replycaps', data=capsblob)
1094 replyhandlers = []
1091 replyhandlers = []
1095 for partgenname in b2partsgenorder:
1092 for partgenname in b2partsgenorder:
1096 partgen = b2partsgenmapping[partgenname]
1093 partgen = b2partsgenmapping[partgenname]
1097 ret = partgen(pushop, bundler)
1094 ret = partgen(pushop, bundler)
1098 if callable(ret):
1095 if callable(ret):
1099 replyhandlers.append(ret)
1096 replyhandlers.append(ret)
1100 # do not push if nothing to push
1097 # do not push if nothing to push
1101 if bundler.nbparts <= 1:
1098 if bundler.nbparts <= 1:
1102 return
1099 return
1103 stream = util.chunkbuffer(bundler.getchunks())
1100 stream = util.chunkbuffer(bundler.getchunks())
1104 try:
1101 try:
1105 try:
1102 try:
1106 with pushop.remote.commandexecutor() as e:
1103 with pushop.remote.commandexecutor() as e:
1107 reply = e.callcommand('unbundle', {
1104 reply = e.callcommand('unbundle', {
1108 'bundle': stream,
1105 'bundle': stream,
1109 'heads': ['force'],
1106 'heads': ['force'],
1110 'url': pushop.remote.url(),
1107 'url': pushop.remote.url(),
1111 }).result()
1108 }).result()
1112 except error.BundleValueError as exc:
1109 except error.BundleValueError as exc:
1113 raise error.Abort(_('missing support for %s') % exc)
1110 raise error.Abort(_('missing support for %s') % exc)
1114 try:
1111 try:
1115 trgetter = None
1112 trgetter = None
1116 if pushback:
1113 if pushback:
1117 trgetter = pushop.trmanager.transaction
1114 trgetter = pushop.trmanager.transaction
1118 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1115 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1119 except error.BundleValueError as exc:
1116 except error.BundleValueError as exc:
1120 raise error.Abort(_('missing support for %s') % exc)
1117 raise error.Abort(_('missing support for %s') % exc)
1121 except bundle2.AbortFromPart as exc:
1118 except bundle2.AbortFromPart as exc:
1122 pushop.ui.status(_('remote: %s\n') % exc)
1119 pushop.ui.status(_('remote: %s\n') % exc)
1123 if exc.hint is not None:
1120 if exc.hint is not None:
1124 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
1121 pushop.ui.status(_('remote: %s\n') % ('(%s)' % exc.hint))
1125 raise error.Abort(_('push failed on remote'))
1122 raise error.Abort(_('push failed on remote'))
1126 except error.PushkeyFailed as exc:
1123 except error.PushkeyFailed as exc:
1127 partid = int(exc.partid)
1124 partid = int(exc.partid)
1128 if partid not in pushop.pkfailcb:
1125 if partid not in pushop.pkfailcb:
1129 raise
1126 raise
1130 pushop.pkfailcb[partid](pushop, exc)
1127 pushop.pkfailcb[partid](pushop, exc)
1131 for rephand in replyhandlers:
1128 for rephand in replyhandlers:
1132 rephand(op)
1129 rephand(op)
1133
1130
1134 def _pushchangeset(pushop):
1131 def _pushchangeset(pushop):
1135 """Make the actual push of changeset bundle to remote repo"""
1132 """Make the actual push of changeset bundle to remote repo"""
1136 if 'changesets' in pushop.stepsdone:
1133 if 'changesets' in pushop.stepsdone:
1137 return
1134 return
1138 pushop.stepsdone.add('changesets')
1135 pushop.stepsdone.add('changesets')
1139 if not _pushcheckoutgoing(pushop):
1136 if not _pushcheckoutgoing(pushop):
1140 return
1137 return
1141
1138
1142 # Should have verified this in push().
1139 # Should have verified this in push().
1143 assert pushop.remote.capable('unbundle')
1140 assert pushop.remote.capable('unbundle')
1144
1141
1145 pushop.repo.prepushoutgoinghooks(pushop)
1142 pushop.repo.prepushoutgoinghooks(pushop)
1146 outgoing = pushop.outgoing
1143 outgoing = pushop.outgoing
1147 # TODO: get bundlecaps from remote
1144 # TODO: get bundlecaps from remote
1148 bundlecaps = None
1145 bundlecaps = None
1149 # create a changegroup from local
1146 # create a changegroup from local
1150 if pushop.revs is None and not (outgoing.excluded
1147 if pushop.revs is None and not (outgoing.excluded
1151 or pushop.repo.changelog.filteredrevs):
1148 or pushop.repo.changelog.filteredrevs):
1152 # push everything,
1149 # push everything,
1153 # use the fast path, no race possible on push
1150 # use the fast path, no race possible on push
1154 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
1151 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
1155 fastpath=True, bundlecaps=bundlecaps)
1152 fastpath=True, bundlecaps=bundlecaps)
1156 else:
1153 else:
1157 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
1154 cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
1158 'push', bundlecaps=bundlecaps)
1155 'push', bundlecaps=bundlecaps)
1159
1156
1160 # apply changegroup to remote
1157 # apply changegroup to remote
1161 # local repo finds heads on server, finds out what
1158 # local repo finds heads on server, finds out what
1162 # revs it must push. once revs transferred, if server
1159 # revs it must push. once revs transferred, if server
1163 # finds it has different heads (someone else won
1160 # finds it has different heads (someone else won
1164 # commit/push race), server aborts.
1161 # commit/push race), server aborts.
1165 if pushop.force:
1162 if pushop.force:
1166 remoteheads = ['force']
1163 remoteheads = ['force']
1167 else:
1164 else:
1168 remoteheads = pushop.remoteheads
1165 remoteheads = pushop.remoteheads
1169 # ssh: return remote's addchangegroup()
1166 # ssh: return remote's addchangegroup()
1170 # http: return remote's addchangegroup() or 0 for error
1167 # http: return remote's addchangegroup() or 0 for error
1171 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
1168 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
1172 pushop.repo.url())
1169 pushop.repo.url())
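# Illustrative note (not part of the original file): in this legacy (non
# bundle2) path the exchange reduces to a single wire call, conceptually:
#
#   cg = changegroup.makechangegroup(repo, outgoing, '01', 'push')
#   heads = ['force'] if pushop.force else pushop.remoteheads
#   pushop.cgresult = pushop.remote.unbundle(cg, heads, pushop.repo.url())
#
# where the server aborts if its heads changed in the meantime (push race).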
1173
1170
1174 def _pushsyncphase(pushop):
1171 def _pushsyncphase(pushop):
1175 """synchronise phase information locally and remotely"""
1172 """synchronise phase information locally and remotely"""
1176 cheads = pushop.commonheads
1173 cheads = pushop.commonheads
1177 # even when we don't push, exchanging phase data is useful
1174 # even when we don't push, exchanging phase data is useful
1178 remotephases = listkeys(pushop.remote, 'phases')
1175 remotephases = listkeys(pushop.remote, 'phases')
1179 if (pushop.ui.configbool('ui', '_usedassubrepo')
1176 if (pushop.ui.configbool('ui', '_usedassubrepo')
1180 and remotephases # server supports phases
1177 and remotephases # server supports phases
1181 and pushop.cgresult is None # nothing was pushed
1178 and pushop.cgresult is None # nothing was pushed
1182 and remotephases.get('publishing', False)):
1179 and remotephases.get('publishing', False)):
1183 # When:
1180 # When:
1184 # - this is a subrepo push
1181 # - this is a subrepo push
1185 # - and the remote supports phases
1186 # - and no changeset was pushed
1183 # - and no changeset was pushed
1187 # - and remote is publishing
1184 # - and remote is publishing
1188 # We may be in issue 3871 case!
1185 # We may be in issue 3871 case!
1189 # We drop the possible phase synchronisation done by
1186 # We drop the possible phase synchronisation done by
1190 # courtesy to publish changesets possibly locally draft
1187 # courtesy to publish changesets possibly locally draft
1191 # on the remote.
1188 # on the remote.
1192 remotephases = {'publishing': 'True'}
1189 remotephases = {'publishing': 'True'}
1193 if not remotephases: # old server or public only reply from non-publishing
1190 if not remotephases: # old server or public only reply from non-publishing
1194 _localphasemove(pushop, cheads)
1191 _localphasemove(pushop, cheads)
1195 # don't push any phase data as there is nothing to push
1192 # don't push any phase data as there is nothing to push
1196 else:
1193 else:
1197 ana = phases.analyzeremotephases(pushop.repo, cheads,
1194 ana = phases.analyzeremotephases(pushop.repo, cheads,
1198 remotephases)
1195 remotephases)
1199 pheads, droots = ana
1196 pheads, droots = ana
1200 ### Apply remote phase on local
1197 ### Apply remote phase on local
1201 if remotephases.get('publishing', False):
1198 if remotephases.get('publishing', False):
1202 _localphasemove(pushop, cheads)
1199 _localphasemove(pushop, cheads)
1203 else: # publish = False
1200 else: # publish = False
1204 _localphasemove(pushop, pheads)
1201 _localphasemove(pushop, pheads)
1205 _localphasemove(pushop, cheads, phases.draft)
1202 _localphasemove(pushop, cheads, phases.draft)
1206 ### Apply local phase on remote
1203 ### Apply local phase on remote
1207
1204
1208 if pushop.cgresult:
1205 if pushop.cgresult:
1209 if 'phases' in pushop.stepsdone:
1206 if 'phases' in pushop.stepsdone:
1210 # phases already pushed through bundle2
1211 return
1208 return
1212 outdated = pushop.outdatedphases
1209 outdated = pushop.outdatedphases
1213 else:
1210 else:
1214 outdated = pushop.fallbackoutdatedphases
1211 outdated = pushop.fallbackoutdatedphases
1215
1212
1216 pushop.stepsdone.add('phases')
1213 pushop.stepsdone.add('phases')
1217
1214
1218 # filter heads already turned public by the push
1215 # filter heads already turned public by the push
1219 outdated = [c for c in outdated if c.node() not in pheads]
1216 outdated = [c for c in outdated if c.node() not in pheads]
1220 # fallback to independent pushkey command
1217 # fallback to independent pushkey command
1221 for newremotehead in outdated:
1218 for newremotehead in outdated:
1222 with pushop.remote.commandexecutor() as e:
1219 with pushop.remote.commandexecutor() as e:
1223 r = e.callcommand('pushkey', {
1220 r = e.callcommand('pushkey', {
1224 'namespace': 'phases',
1221 'namespace': 'phases',
1225 'key': newremotehead.hex(),
1222 'key': newremotehead.hex(),
1226 'old': '%d' % phases.draft,
1223 'old': '%d' % phases.draft,
1227 'new': '%d' % phases.public
1224 'new': '%d' % phases.public
1228 }).result()
1225 }).result()
1229
1226
1230 if not r:
1227 if not r:
1231 pushop.ui.warn(_('updating %s to public failed!\n')
1228 pushop.ui.warn(_('updating %s to public failed!\n')
1232 % newremotehead)
1229 % newremotehead)
1233
1230
1234 def _localphasemove(pushop, nodes, phase=phases.public):
1231 def _localphasemove(pushop, nodes, phase=phases.public):
1235 """move <nodes> to <phase> in the local source repo"""
1232 """move <nodes> to <phase> in the local source repo"""
1236 if pushop.trmanager:
1233 if pushop.trmanager:
1237 phases.advanceboundary(pushop.repo,
1234 phases.advanceboundary(pushop.repo,
1238 pushop.trmanager.transaction(),
1235 pushop.trmanager.transaction(),
1239 phase,
1236 phase,
1240 nodes)
1237 nodes)
1241 else:
1238 else:
1242 # repo is not locked, do not change any phases!
1239 # repo is not locked, do not change any phases!
1244 # Inform the user that phases should have been moved when
1245 # applicable.
1245 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1242 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1246 phasestr = phases.phasenames[phase]
1243 phasestr = phases.phasenames[phase]
1247 if actualmoves:
1244 if actualmoves:
1248 pushop.ui.status(_('cannot lock source repo, skipping '
1245 pushop.ui.status(_('cannot lock source repo, skipping '
1249 'local %s phase update\n') % phasestr)
1246 'local %s phase update\n') % phasestr)
1250
1247
1251 def _pushobsolete(pushop):
1248 def _pushobsolete(pushop):
1252 """utility function to push obsolete markers to a remote"""
1249 """utility function to push obsolete markers to a remote"""
1253 if 'obsmarkers' in pushop.stepsdone:
1250 if 'obsmarkers' in pushop.stepsdone:
1254 return
1251 return
1255 repo = pushop.repo
1252 repo = pushop.repo
1256 remote = pushop.remote
1253 remote = pushop.remote
1257 pushop.stepsdone.add('obsmarkers')
1254 pushop.stepsdone.add('obsmarkers')
1258 if pushop.outobsmarkers:
1255 if pushop.outobsmarkers:
1259 pushop.ui.debug('try to push obsolete markers to remote\n')
1256 pushop.ui.debug('try to push obsolete markers to remote\n')
1260 rslts = []
1257 rslts = []
1261 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1258 remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
1262 for key in sorted(remotedata, reverse=True):
1259 for key in sorted(remotedata, reverse=True):
1263 # reverse sort to ensure we end with dump0
1260 # reverse sort to ensure we end with dump0
1264 data = remotedata[key]
1261 data = remotedata[key]
1265 rslts.append(remote.pushkey('obsolete', key, '', data))
1262 rslts.append(remote.pushkey('obsolete', key, '', data))
1266 if [r for r in rslts if not r]:
1263 if [r for r in rslts if not r]:
1267 msg = _('failed to push some obsolete markers!\n')
1264 msg = _('failed to push some obsolete markers!\n')
1268 repo.ui.warn(msg)
1265 repo.ui.warn(msg)
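# Illustrative note (not part of the original file): obsolescence markers do
# not fit into a single pushkey value, so _pushkeyescape() splits them into
# 'dump0', 'dump1', ... entries; iterating in reverse-sorted order means the
# last key written is dump0, e.g.:
#
#   for key in sorted(remotedata, reverse=True):   # dumpN, ..., dump1, dump0
#       remote.pushkey('obsolete', key, '', remotedata[key])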
1269
1266
1270 def _pushbookmark(pushop):
1267 def _pushbookmark(pushop):
1271 """Update bookmark position on remote"""
1268 """Update bookmark position on remote"""
1272 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1269 if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
1273 return
1270 return
1274 pushop.stepsdone.add('bookmarks')
1271 pushop.stepsdone.add('bookmarks')
1275 ui = pushop.ui
1272 ui = pushop.ui
1276 remote = pushop.remote
1273 remote = pushop.remote
1277
1274
1278 for b, old, new in pushop.outbookmarks:
1275 for b, old, new in pushop.outbookmarks:
1279 action = 'update'
1276 action = 'update'
1280 if not old:
1277 if not old:
1281 action = 'export'
1278 action = 'export'
1282 elif not new:
1279 elif not new:
1283 action = 'delete'
1280 action = 'delete'
1284
1281
1285 with remote.commandexecutor() as e:
1282 with remote.commandexecutor() as e:
1286 r = e.callcommand('pushkey', {
1283 r = e.callcommand('pushkey', {
1287 'namespace': 'bookmarks',
1284 'namespace': 'bookmarks',
1288 'key': b,
1285 'key': b,
1289 'old': old,
1286 'old': old,
1290 'new': new,
1287 'new': new,
1291 }).result()
1288 }).result()
1292
1289
1293 if r:
1290 if r:
1294 ui.status(bookmsgmap[action][0] % b)
1291 ui.status(bookmsgmap[action][0] % b)
1295 else:
1292 else:
1296 ui.warn(bookmsgmap[action][1] % b)
1293 ui.warn(bookmsgmap[action][1] % b)
1297 # discovery can have set the value from an invalid entry
1298 if pushop.bkresult is not None:
1295 if pushop.bkresult is not None:
1299 pushop.bkresult = 1
1296 pushop.bkresult = 1
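# Illustrative example (not part of the original file): with the pushkey
# protocol, deleting a remote bookmark is simply an update to an empty value,
# conceptually:
#
#   remote.pushkey('bookmarks', 'stale-book', old_hex_node, '')
#
# ('stale-book' and old_hex_node are hypothetical placeholders.)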
1300
1297
1301 class pulloperation(object):
1298 class pulloperation(object):
1302 """An object that represents a single pull operation
1303
1304 Its purpose is to carry pull-related state and very common operations.
1305
1306 A new one should be created at the beginning of each pull and discarded
1307 afterward.
1308 """
1309
1306
1310 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1307 def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
1311 remotebookmarks=None, streamclonerequested=None):
1308 remotebookmarks=None, streamclonerequested=None):
1312 # repo we pull into
1309 # repo we pull into
1313 self.repo = repo
1310 self.repo = repo
1314 # repo we pull from
1311 # repo we pull from
1315 self.remote = remote
1312 self.remote = remote
1316 # revision we try to pull (None is "all")
1313 # revision we try to pull (None is "all")
1317 self.heads = heads
1314 self.heads = heads
1318 # bookmark pulled explicitly
1315 # bookmark pulled explicitly
1319 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1316 self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
1320 for bookmark in bookmarks]
1317 for bookmark in bookmarks]
1321 # do we force pull?
1318 # do we force pull?
1322 self.force = force
1319 self.force = force
1323 # whether a streaming clone was requested
1320 # whether a streaming clone was requested
1324 self.streamclonerequested = streamclonerequested
1321 self.streamclonerequested = streamclonerequested
1325 # transaction manager
1322 # transaction manager
1326 self.trmanager = None
1323 self.trmanager = None
1327 # set of common changeset between local and remote before pull
1324 # set of common changeset between local and remote before pull
1328 self.common = None
1325 self.common = None
1329 # set of pulled head
1326 # set of pulled head
1330 self.rheads = None
1327 self.rheads = None
1331 # list of missing changeset to fetch remotely
1328 # list of missing changeset to fetch remotely
1332 self.fetch = None
1329 self.fetch = None
1333 # remote bookmarks data
1330 # remote bookmarks data
1334 self.remotebookmarks = remotebookmarks
1331 self.remotebookmarks = remotebookmarks
1335 # result of changegroup pulling (used as return code by pull)
1332 # result of changegroup pulling (used as return code by pull)
1336 self.cgresult = None
1333 self.cgresult = None
1337 # list of step already done
1334 # list of step already done
1338 self.stepsdone = set()
1335 self.stepsdone = set()
1339 # Whether we attempted a clone from pre-generated bundles.
1336 # Whether we attempted a clone from pre-generated bundles.
1340 self.clonebundleattempted = False
1337 self.clonebundleattempted = False
1341
1338
1342 @util.propertycache
1339 @util.propertycache
1343 def pulledsubset(self):
1340 def pulledsubset(self):
1344 """heads of the set of changesets targeted by the pull"""
1345 # compute target subset
1342 # compute target subset
1346 if self.heads is None:
1343 if self.heads is None:
1347 # We pulled everything possible
1348 # sync on everything common
1345 # sync on everything common
1349 c = set(self.common)
1346 c = set(self.common)
1350 ret = list(self.common)
1347 ret = list(self.common)
1351 for n in self.rheads:
1348 for n in self.rheads:
1352 if n not in c:
1349 if n not in c:
1353 ret.append(n)
1350 ret.append(n)
1354 return ret
1351 return ret
1355 else:
1352 else:
1356 # We pulled a specific subset
1353 # We pulled a specific subset
1357 # sync on this subset
1354 # sync on this subset
1358 return self.heads
1355 return self.heads
1359
1356
1360 @util.propertycache
1357 @util.propertycache
1361 def canusebundle2(self):
1358 def canusebundle2(self):
1362 return not _forcebundle1(self)
1359 return not _forcebundle1(self)
1363
1360
1364 @util.propertycache
1361 @util.propertycache
1365 def remotebundle2caps(self):
1362 def remotebundle2caps(self):
1366 return bundle2.bundle2caps(self.remote)
1363 return bundle2.bundle2caps(self.remote)
1367
1364
1368 def gettransaction(self):
1365 def gettransaction(self):
1369 # deprecated; talk to trmanager directly
1366 # deprecated; talk to trmanager directly
1370 return self.trmanager.transaction()
1367 return self.trmanager.transaction()
1371
1368
1372 class transactionmanager(util.transactional):
1369 class transactionmanager(util.transactional):
1373 """An object to manage the life cycle of a transaction
1370 """An object to manage the life cycle of a transaction
1374
1371
1375 It creates the transaction on demand and calls the appropriate hooks when
1372 It creates the transaction on demand and calls the appropriate hooks when
1376 closing the transaction."""
1373 closing the transaction."""
1377 def __init__(self, repo, source, url):
1374 def __init__(self, repo, source, url):
1378 self.repo = repo
1375 self.repo = repo
1379 self.source = source
1376 self.source = source
1380 self.url = url
1377 self.url = url
1381 self._tr = None
1378 self._tr = None
1382
1379
1383 def transaction(self):
1380 def transaction(self):
1384 """Return an open transaction object, constructing if necessary"""
1381 """Return an open transaction object, constructing if necessary"""
1385 if not self._tr:
1382 if not self._tr:
1386 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1383 trname = '%s\n%s' % (self.source, util.hidepassword(self.url))
1387 self._tr = self.repo.transaction(trname)
1384 self._tr = self.repo.transaction(trname)
1388 self._tr.hookargs['source'] = self.source
1385 self._tr.hookargs['source'] = self.source
1389 self._tr.hookargs['url'] = self.url
1386 self._tr.hookargs['url'] = self.url
1390 return self._tr
1387 return self._tr
1391
1388
1392 def close(self):
1389 def close(self):
1393 """close transaction if created"""
1390 """close transaction if created"""
1394 if self._tr is not None:
1391 if self._tr is not None:
1395 self._tr.close()
1392 self._tr.close()
1396
1393
1397 def release(self):
1394 def release(self):
1398 """release transaction if created"""
1395 """release transaction if created"""
1399 if self._tr is not None:
1396 if self._tr is not None:
1400 self._tr.release()
1397 self._tr.release()
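# Illustrative usage sketch (not part of the original file):
#
#   trmanager = transactionmanager(repo, 'pull', remote.url())
#   with trmanager:
#       tr = trmanager.transaction()    # created lazily on first use
#       ...                             # apply incoming data under tr
#
# util.transactional's __exit__ closes the transaction on success and always
# releases it, which is how pull() below uses this object.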
1401
1398
1402 def listkeys(remote, namespace):
1399 def listkeys(remote, namespace):
1403 with remote.commandexecutor() as e:
1400 with remote.commandexecutor() as e:
1404 return e.callcommand('listkeys', {'namespace': namespace}).result()
1401 return e.callcommand('listkeys', {'namespace': namespace}).result()
1405
1402
1406 def _fullpullbundle2(repo, pullop):
1403 def _fullpullbundle2(repo, pullop):
1407 # The server may send a partial reply, i.e. when inlining
1404 # The server may send a partial reply, i.e. when inlining
1408 # pre-computed bundles. In that case, update the common
1405 # pre-computed bundles. In that case, update the common
1409 # set based on the results and pull another bundle.
1406 # set based on the results and pull another bundle.
1410 #
1407 #
1411 # There are two indicators that the process is finished:
1408 # There are two indicators that the process is finished:
1412 # - no changeset has been added, or
1409 # - no changeset has been added, or
1413 # - all remote heads are known locally.
1410 # - all remote heads are known locally.
1414 # The head check must use the unfiltered view as obsoletion
1411 # The head check must use the unfiltered view as obsoletion
1415 # markers can hide heads.
1412 # markers can hide heads.
1416 unfi = repo.unfiltered()
1413 unfi = repo.unfiltered()
1417 unficl = unfi.changelog
1414 unficl = unfi.changelog
1418 def headsofdiff(h1, h2):
1415 def headsofdiff(h1, h2):
1419 """Returns heads(h1 % h2)"""
1416 """Returns heads(h1 % h2)"""
1420 res = unfi.set('heads(%ln %% %ln)', h1, h2)
1417 res = unfi.set('heads(%ln %% %ln)', h1, h2)
1421 return set(ctx.node() for ctx in res)
1418 return set(ctx.node() for ctx in res)
1422 def headsofunion(h1, h2):
1419 def headsofunion(h1, h2):
1423 """Returns heads((h1 + h2) - null)"""
1420 """Returns heads((h1 + h2) - null)"""
1424 res = unfi.set('heads((%ln + %ln - null))', h1, h2)
1421 res = unfi.set('heads((%ln + %ln - null))', h1, h2)
1425 return set(ctx.node() for ctx in res)
1422 return set(ctx.node() for ctx in res)
1426 while True:
1423 while True:
1427 old_heads = unficl.heads()
1424 old_heads = unficl.heads()
1428 clstart = len(unficl)
1425 clstart = len(unficl)
1429 _pullbundle2(pullop)
1426 _pullbundle2(pullop)
1430 if changegroup.NARROW_REQUIREMENT in repo.requirements:
1427 if changegroup.NARROW_REQUIREMENT in repo.requirements:
1431 # XXX narrow clones filter the heads on the server side during
1428 # XXX narrow clones filter the heads on the server side during
1432 # XXX getbundle and result in partial replies as well.
1429 # XXX getbundle and result in partial replies as well.
1433 # XXX Disable pull bundles in this case as band aid to avoid
1430 # XXX Disable pull bundles in this case as band aid to avoid
1434 # XXX extra round trips.
1431 # XXX extra round trips.
1435 break
1432 break
1436 if clstart == len(unficl):
1433 if clstart == len(unficl):
1437 break
1434 break
1438 if all(unficl.hasnode(n) for n in pullop.rheads):
1435 if all(unficl.hasnode(n) for n in pullop.rheads):
1439 break
1436 break
1440 new_heads = headsofdiff(unficl.heads(), old_heads)
1437 new_heads = headsofdiff(unficl.heads(), old_heads)
1441 pullop.common = headsofunion(new_heads, pullop.common)
1438 pullop.common = headsofunion(new_heads, pullop.common)
1442 pullop.rheads = set(pullop.rheads) - pullop.common
1439 pullop.rheads = set(pullop.rheads) - pullop.common
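# Illustrative note (not part of the original file): the helpers above are
# thin wrappers around revsets; headsofdiff(h1, h2) evaluates
# 'heads(%ln %% %ln)', i.e. the heads of revisions in h1 that are not
# ancestors of h2, which is what lets the loop detect that a partial reply
# actually made progress before requesting another bundle.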
1443
1440
1444 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1441 def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
1445 streamclonerequested=None):
1442 streamclonerequested=None):
1446 """Fetch repository data from a remote.
1443 """Fetch repository data from a remote.
1447
1444
1448 This is the main function used to retrieve data from a remote repository.
1445 This is the main function used to retrieve data from a remote repository.
1449
1446
1450 ``repo`` is the local repository to clone into.
1447 ``repo`` is the local repository to clone into.
1451 ``remote`` is a peer instance.
1448 ``remote`` is a peer instance.
1452 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1449 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1453 default) means to pull everything from the remote.
1450 default) means to pull everything from the remote.
1454 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1451 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1455 default, all remote bookmarks are pulled.
1452 default, all remote bookmarks are pulled.
1456 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1453 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1457 initialization.
1454 initialization.
1458 ``streamclonerequested`` is a boolean indicating whether a "streaming
1455 ``streamclonerequested`` is a boolean indicating whether a "streaming
1459 clone" is requested. A "streaming clone" is essentially a raw file copy
1456 clone" is requested. A "streaming clone" is essentially a raw file copy
1460 of revlogs from the server. This only works when the local repository is
1457 of revlogs from the server. This only works when the local repository is
1461 empty. The default value of ``None`` means to respect the server
1458 empty. The default value of ``None`` means to respect the server
1462 configuration for preferring stream clones.
1459 configuration for preferring stream clones.
1463
1460
1464 Returns the ``pulloperation`` created for this pull.
1461 Returns the ``pulloperation`` created for this pull.
1465 """
1462 """
1466 if opargs is None:
1463 if opargs is None:
1467 opargs = {}
1464 opargs = {}
1468 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1465 pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
1469 streamclonerequested=streamclonerequested,
1466 streamclonerequested=streamclonerequested,
1470 **pycompat.strkwargs(opargs))
1467 **pycompat.strkwargs(opargs))
1471
1468
1472 peerlocal = pullop.remote.local()
1469 peerlocal = pullop.remote.local()
1473 if peerlocal:
1470 if peerlocal:
1474 missing = set(peerlocal.requirements) - pullop.repo.supported
1471 missing = set(peerlocal.requirements) - pullop.repo.supported
1475 if missing:
1472 if missing:
1476 msg = _("required features are not"
1473 msg = _("required features are not"
1477 " supported in the destination:"
1474 " supported in the destination:"
1478 " %s") % (', '.join(sorted(missing)))
1475 " %s") % (', '.join(sorted(missing)))
1479 raise error.Abort(msg)
1476 raise error.Abort(msg)
1480
1477
1481 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1478 pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
1482 with repo.wlock(), repo.lock(), pullop.trmanager:
1479 with repo.wlock(), repo.lock(), pullop.trmanager:
1483 # This should ideally be in _pullbundle2(). However, it needs to run
1480 # This should ideally be in _pullbundle2(). However, it needs to run
1484 # before discovery to avoid extra work.
1481 # before discovery to avoid extra work.
1485 _maybeapplyclonebundle(pullop)
1482 _maybeapplyclonebundle(pullop)
1486 streamclone.maybeperformlegacystreamclone(pullop)
1483 streamclone.maybeperformlegacystreamclone(pullop)
1487 _pulldiscovery(pullop)
1484 _pulldiscovery(pullop)
1488 if pullop.canusebundle2:
1485 if pullop.canusebundle2:
1489 _fullpullbundle2(repo, pullop)
1486 _fullpullbundle2(repo, pullop)
1490 _pullchangeset(pullop)
1487 _pullchangeset(pullop)
1491 _pullphase(pullop)
1488 _pullphase(pullop)
1492 _pullbookmarks(pullop)
1489 _pullbookmarks(pullop)
1493 _pullobsolete(pullop)
1490 _pullobsolete(pullop)
1494
1491
1495 # storing remotenames
1492 # storing remotenames
1496 if repo.ui.configbool('experimental', 'remotenames'):
1493 if repo.ui.configbool('experimental', 'remotenames'):
1497 logexchange.pullremotenames(repo, remote)
1494 logexchange.pullremotenames(repo, remote)
1498
1495
1499 return pullop
1496 return pullop
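# Illustrative usage sketch (not part of the original file), assuming an
# already-open local repository object:
#
#   from mercurial import exchange, hg
#   other = hg.peer(repo, {}, 'https://example.com/repo')  # hypothetical URL
#   pullop = exchange.pull(repo, other)                     # pull everything
#   if pullop.cgresult:
#       repo.ui.status('new changesets pulled\n')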
1500
1497
1501 # list of steps to perform discovery before pull
1498 # list of steps to perform discovery before pull
1502 pulldiscoveryorder = []
1499 pulldiscoveryorder = []
1503
1500
1504 # Mapping between step name and function
1501 # Mapping between step name and function
1505 #
1502 #
1506 # This exists to help extensions wrap steps if necessary
1503 # This exists to help extensions wrap steps if necessary
1507 pulldiscoverymapping = {}
1504 pulldiscoverymapping = {}
1508
1505
1509 def pulldiscovery(stepname):
1506 def pulldiscovery(stepname):
1510 """decorator for function performing discovery before pull
1507 """decorator for function performing discovery before pull
1511
1508
1512 The function is added to the step -> function mapping and appended to the
1509 The function is added to the step -> function mapping and appended to the
1513 list of steps. Beware that decorated functions will be added in order (this
1514 may matter).
1515
1516 You can only use this decorator for a new step; if you want to wrap a step
1517 from an extension, change the pulldiscovery dictionary directly."""
1518 def dec(func):
1515 def dec(func):
1519 assert stepname not in pulldiscoverymapping
1516 assert stepname not in pulldiscoverymapping
1520 pulldiscoverymapping[stepname] = func
1517 pulldiscoverymapping[stepname] = func
1521 pulldiscoveryorder.append(stepname)
1518 pulldiscoveryorder.append(stepname)
1522 return func
1519 return func
1523 return dec
1520 return dec
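# Illustrative example (not part of the original file): as the docstring says,
# wrapping an existing step is done through the mapping rather than the
# decorator, e.g.:
#
#   origstep = pulldiscoverymapping['changegroup']
#   def wrappedstep(pullop):
#       # extra work here
#       return origstep(pullop)
#   pulldiscoverymapping['changegroup'] = wrappedstep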
1524
1521
1525 def _pulldiscovery(pullop):
1522 def _pulldiscovery(pullop):
1526 """Run all discovery steps"""
1523 """Run all discovery steps"""
1527 for stepname in pulldiscoveryorder:
1524 for stepname in pulldiscoveryorder:
1528 step = pulldiscoverymapping[stepname]
1525 step = pulldiscoverymapping[stepname]
1529 step(pullop)
1526 step(pullop)
1530
1527
1531 @pulldiscovery('b1:bookmarks')
1528 @pulldiscovery('b1:bookmarks')
1532 def _pullbookmarkbundle1(pullop):
1529 def _pullbookmarkbundle1(pullop):
1533 """fetch bookmark data in bundle1 case
1530 """fetch bookmark data in bundle1 case
1534
1531
1535 If not using bundle2, we have to fetch bookmarks before changeset
1532 If not using bundle2, we have to fetch bookmarks before changeset
1536 discovery to reduce the chance and impact of race conditions."""
1533 discovery to reduce the chance and impact of race conditions."""
1537 if pullop.remotebookmarks is not None:
1534 if pullop.remotebookmarks is not None:
1538 return
1535 return
1539 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1536 if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps:
1540 # all known bundle2 servers now support listkeys, but let's be nice with
1541 # new implementations.
1542 return
1539 return
1543 books = listkeys(pullop.remote, 'bookmarks')
1540 books = listkeys(pullop.remote, 'bookmarks')
1544 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1541 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1545
1542
1546
1543
1547 @pulldiscovery('changegroup')
1544 @pulldiscovery('changegroup')
1548 def _pulldiscoverychangegroup(pullop):
1545 def _pulldiscoverychangegroup(pullop):
1549 """discovery phase for the pull
1546 """discovery phase for the pull
1550
1547
1551 Currently this handles changeset discovery only; it will change to handle
1552 all discovery at some point."""
1553 tmp = discovery.findcommonincoming(pullop.repo,
1550 tmp = discovery.findcommonincoming(pullop.repo,
1554 pullop.remote,
1551 pullop.remote,
1555 heads=pullop.heads,
1552 heads=pullop.heads,
1556 force=pullop.force)
1553 force=pullop.force)
1557 common, fetch, rheads = tmp
1554 common, fetch, rheads = tmp
1558 nm = pullop.repo.unfiltered().changelog.nodemap
1555 nm = pullop.repo.unfiltered().changelog.nodemap
1559 if fetch and rheads:
1556 if fetch and rheads:
1560 # If a remote head is filtered locally, put it back in common.
1561 #
1562 # This is a hackish solution to catch most "common but locally
1563 # hidden" situations. We do not perform discovery on the unfiltered
1564 # repository because it ends up doing a pathological number of round
1565 # trips for a huge amount of changesets we do not care about.
1566 #
1567 # If a set of such "common but filtered" changesets exists on the server
1568 # but does not include a remote head, we will not be able to detect it,
1569 scommon = set(common)
1566 scommon = set(common)
1570 for n in rheads:
1567 for n in rheads:
1571 if n in nm:
1568 if n in nm:
1572 if n not in scommon:
1569 if n not in scommon:
1573 common.append(n)
1570 common.append(n)
1574 if set(rheads).issubset(set(common)):
1571 if set(rheads).issubset(set(common)):
1575 fetch = []
1572 fetch = []
1576 pullop.common = common
1573 pullop.common = common
1577 pullop.fetch = fetch
1574 pullop.fetch = fetch
1578 pullop.rheads = rheads
1575 pullop.rheads = rheads
1579
1576
1580 def _pullbundle2(pullop):
1577 def _pullbundle2(pullop):
1581 """pull data using bundle2
1578 """pull data using bundle2
1582
1579
1583 For now, the only supported data are changegroup."""
1580 For now, the only supported data are changegroup."""
1584 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1581 kwargs = {'bundlecaps': caps20to10(pullop.repo, role='client')}
1585
1582
1586 # make ui easier to access
1583 # make ui easier to access
1587 ui = pullop.repo.ui
1584 ui = pullop.repo.ui
1588
1585
1589 # At the moment we don't do stream clones over bundle2. If that is
1586 # At the moment we don't do stream clones over bundle2. If that is
1590 # implemented then here's where the check for that will go.
1587 # implemented then here's where the check for that will go.
1591 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1588 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1592
1589
1593 # declare pull perimeters
1590 # declare pull perimeters
1594 kwargs['common'] = pullop.common
1591 kwargs['common'] = pullop.common
1595 kwargs['heads'] = pullop.heads or pullop.rheads
1592 kwargs['heads'] = pullop.heads or pullop.rheads
1596
1593
1597 if streaming:
1594 if streaming:
1598 kwargs['cg'] = False
1595 kwargs['cg'] = False
1599 kwargs['stream'] = True
1596 kwargs['stream'] = True
1600 pullop.stepsdone.add('changegroup')
1597 pullop.stepsdone.add('changegroup')
1601 pullop.stepsdone.add('phases')
1598 pullop.stepsdone.add('phases')
1602
1599
1603 else:
1600 else:
1604 # pulling changegroup
1601 # pulling changegroup
1605 pullop.stepsdone.add('changegroup')
1602 pullop.stepsdone.add('changegroup')
1606
1603
1607 kwargs['cg'] = pullop.fetch
1604 kwargs['cg'] = pullop.fetch
1608
1605
1609 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1606 legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
1610 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1607 hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
1611 if (not legacyphase and hasbinaryphase):
1608 if (not legacyphase and hasbinaryphase):
1612 kwargs['phases'] = True
1609 kwargs['phases'] = True
1613 pullop.stepsdone.add('phases')
1610 pullop.stepsdone.add('phases')
1614
1611
1615 if 'listkeys' in pullop.remotebundle2caps:
1612 if 'listkeys' in pullop.remotebundle2caps:
1616 if 'phases' not in pullop.stepsdone:
1613 if 'phases' not in pullop.stepsdone:
1617 kwargs['listkeys'] = ['phases']
1614 kwargs['listkeys'] = ['phases']
1618
1615
1619 bookmarksrequested = False
1616 bookmarksrequested = False
1620 legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
1617 legacybookmark = 'bookmarks' in ui.configlist('devel', 'legacy.exchange')
1621 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1618 hasbinarybook = 'bookmarks' in pullop.remotebundle2caps
1622
1619
1623 if pullop.remotebookmarks is not None:
1620 if pullop.remotebookmarks is not None:
1624 pullop.stepsdone.add('request-bookmarks')
1621 pullop.stepsdone.add('request-bookmarks')
1625
1622
1626 if ('request-bookmarks' not in pullop.stepsdone
1623 if ('request-bookmarks' not in pullop.stepsdone
1627 and pullop.remotebookmarks is None
1624 and pullop.remotebookmarks is None
1628 and not legacybookmark and hasbinarybook):
1625 and not legacybookmark and hasbinarybook):
1629 kwargs['bookmarks'] = True
1626 kwargs['bookmarks'] = True
1630 bookmarksrequested = True
1627 bookmarksrequested = True
1631
1628
1632 if 'listkeys' in pullop.remotebundle2caps:
1629 if 'listkeys' in pullop.remotebundle2caps:
1633 if 'request-bookmarks' not in pullop.stepsdone:
1630 if 'request-bookmarks' not in pullop.stepsdone:
1634 # make sure to always include bookmark data when migrating
1635 # `hg incoming --bundle` to using this function.
1636 pullop.stepsdone.add('request-bookmarks')
1633 pullop.stepsdone.add('request-bookmarks')
1637 kwargs.setdefault('listkeys', []).append('bookmarks')
1634 kwargs.setdefault('listkeys', []).append('bookmarks')
1638
1635
1639 # If this is a full pull / clone and the server supports the clone bundles
1636 # If this is a full pull / clone and the server supports the clone bundles
1640 # feature, tell the server whether we attempted a clone bundle. The
1637 # feature, tell the server whether we attempted a clone bundle. The
1641 # presence of this flag indicates the client supports clone bundles. This
1638 # presence of this flag indicates the client supports clone bundles. This
1642 # will enable the server to treat clients that support clone bundles
1639 # will enable the server to treat clients that support clone bundles
1643 # differently from those that don't.
1640 # differently from those that don't.
1644 if (pullop.remote.capable('clonebundles')
1641 if (pullop.remote.capable('clonebundles')
1645 and pullop.heads is None and list(pullop.common) == [nullid]):
1642 and pullop.heads is None and list(pullop.common) == [nullid]):
1646 kwargs['cbattempted'] = pullop.clonebundleattempted
1643 kwargs['cbattempted'] = pullop.clonebundleattempted
1647
1644
1648 if streaming:
1645 if streaming:
1649 pullop.repo.ui.status(_('streaming all changes\n'))
1646 pullop.repo.ui.status(_('streaming all changes\n'))
1650 elif not pullop.fetch:
1647 elif not pullop.fetch:
1651 pullop.repo.ui.status(_("no changes found\n"))
1648 pullop.repo.ui.status(_("no changes found\n"))
1652 pullop.cgresult = 0
1649 pullop.cgresult = 0
1653 else:
1650 else:
1654 if pullop.heads is None and list(pullop.common) == [nullid]:
1651 if pullop.heads is None and list(pullop.common) == [nullid]:
1655 pullop.repo.ui.status(_("requesting all changes\n"))
1652 pullop.repo.ui.status(_("requesting all changes\n"))
1656 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1653 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1657 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1654 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
1658 if obsolete.commonversion(remoteversions) is not None:
1655 if obsolete.commonversion(remoteversions) is not None:
1659 kwargs['obsmarkers'] = True
1656 kwargs['obsmarkers'] = True
1660 pullop.stepsdone.add('obsmarkers')
1657 pullop.stepsdone.add('obsmarkers')
1661 _pullbundle2extraprepare(pullop, kwargs)
1658 _pullbundle2extraprepare(pullop, kwargs)
1662
1659
1663 with pullop.remote.commandexecutor() as e:
1660 with pullop.remote.commandexecutor() as e:
1664 args = dict(kwargs)
1661 args = dict(kwargs)
1665 args['source'] = 'pull'
1662 args['source'] = 'pull'
1666 bundle = e.callcommand('getbundle', args).result()
1663 bundle = e.callcommand('getbundle', args).result()
1667
1664
1668 try:
1665 try:
1669 op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction,
1666 op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction,
1670 source='pull')
1667 source='pull')
1671 op.modes['bookmarks'] = 'records'
1668 op.modes['bookmarks'] = 'records'
1672 bundle2.processbundle(pullop.repo, bundle, op=op)
1669 bundle2.processbundle(pullop.repo, bundle, op=op)
1673 except bundle2.AbortFromPart as exc:
1670 except bundle2.AbortFromPart as exc:
1674 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1671 pullop.repo.ui.status(_('remote: abort: %s\n') % exc)
1675 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1672 raise error.Abort(_('pull failed on remote'), hint=exc.hint)
1676 except error.BundleValueError as exc:
1673 except error.BundleValueError as exc:
1677 raise error.Abort(_('missing support for %s') % exc)
1674 raise error.Abort(_('missing support for %s') % exc)
1678
1675
1679 if pullop.fetch:
1676 if pullop.fetch:
1680 pullop.cgresult = bundle2.combinechangegroupresults(op)
1677 pullop.cgresult = bundle2.combinechangegroupresults(op)
1681
1678
1682 # processing phases change
1679 # processing phases change
1683 for namespace, value in op.records['listkeys']:
1680 for namespace, value in op.records['listkeys']:
1684 if namespace == 'phases':
1681 if namespace == 'phases':
1685 _pullapplyphases(pullop, value)
1682 _pullapplyphases(pullop, value)
1686
1683
1687 # processing bookmark update
1684 # processing bookmark update
1688 if bookmarksrequested:
1685 if bookmarksrequested:
1689 books = {}
1686 books = {}
1690 for record in op.records['bookmarks']:
1687 for record in op.records['bookmarks']:
1691 books[record['bookmark']] = record["node"]
1688 books[record['bookmark']] = record["node"]
1692 pullop.remotebookmarks = books
1689 pullop.remotebookmarks = books
1693 else:
1690 else:
1694 for namespace, value in op.records['listkeys']:
1691 for namespace, value in op.records['listkeys']:
1695 if namespace == 'bookmarks':
1692 if namespace == 'bookmarks':
1696 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
1693 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
1697
1694
1698 # bookmark data were either already there or pulled in the bundle
1695 # bookmark data were either already there or pulled in the bundle
1699 if pullop.remotebookmarks is not None:
1696 if pullop.remotebookmarks is not None:
1700 _pullbookmarks(pullop)
1697 _pullbookmarks(pullop)
1701
1698
1702 def _pullbundle2extraprepare(pullop, kwargs):
1699 def _pullbundle2extraprepare(pullop, kwargs):
1703 """hook function so that extensions can extend the getbundle call"""
1700 """hook function so that extensions can extend the getbundle call"""
1704
1701
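Since `_pullbundle2extraprepare` is deliberately empty, extensions extend the getbundle call by wrapping it. A minimal sketch of such an extension, assuming a hypothetical extra argument named 'myextradata' (the wrapfunction pattern is standard; the argument name is made up):

    from mercurial import exchange, extensions

    def _extraprepare(orig, pullop, kwargs):
        # run the original (no-op) hook first, then add our own argument
        orig(pullop, kwargs)
        kwargs['myextradata'] = True  # hypothetical getbundle argument

    def extsetup(ui):
        extensions.wrapfunction(exchange, '_pullbundle2extraprepare',
                                _extraprepare)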
1705 def _pullchangeset(pullop):
1702 def _pullchangeset(pullop):
1706 """pull changeset from unbundle into the local repo"""
1703 """pull changeset from unbundle into the local repo"""
1707 # We delay opening the transaction as late as possible so we
1704 # We delay opening the transaction as late as possible so we
1708 # don't open a transaction for nothing and don't break future useful
1705 # don't open a transaction for nothing and don't break future useful
1709 # rollback calls
1706 # rollback calls
1710 if 'changegroup' in pullop.stepsdone:
1707 if 'changegroup' in pullop.stepsdone:
1711 return
1708 return
1712 pullop.stepsdone.add('changegroup')
1709 pullop.stepsdone.add('changegroup')
1713 if not pullop.fetch:
1710 if not pullop.fetch:
1714 pullop.repo.ui.status(_("no changes found\n"))
1711 pullop.repo.ui.status(_("no changes found\n"))
1715 pullop.cgresult = 0
1712 pullop.cgresult = 0
1716 return
1713 return
1717 tr = pullop.gettransaction()
1714 tr = pullop.gettransaction()
1718 if pullop.heads is None and list(pullop.common) == [nullid]:
1715 if pullop.heads is None and list(pullop.common) == [nullid]:
1719 pullop.repo.ui.status(_("requesting all changes\n"))
1716 pullop.repo.ui.status(_("requesting all changes\n"))
1720 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1717 elif pullop.heads is None and pullop.remote.capable('changegroupsubset'):
1721 # issue1320, avoid a race if remote changed after discovery
1718 # issue1320, avoid a race if remote changed after discovery
1722 pullop.heads = pullop.rheads
1719 pullop.heads = pullop.rheads
1723
1720
1724 if pullop.remote.capable('getbundle'):
1721 if pullop.remote.capable('getbundle'):
1725 # TODO: get bundlecaps from remote
1722 # TODO: get bundlecaps from remote
1726 cg = pullop.remote.getbundle('pull', common=pullop.common,
1723 cg = pullop.remote.getbundle('pull', common=pullop.common,
1727 heads=pullop.heads or pullop.rheads)
1724 heads=pullop.heads or pullop.rheads)
1728 elif pullop.heads is None:
1725 elif pullop.heads is None:
1729 with pullop.remote.commandexecutor() as e:
1726 with pullop.remote.commandexecutor() as e:
1730 cg = e.callcommand('changegroup', {
1727 cg = e.callcommand('changegroup', {
1731 'nodes': pullop.fetch,
1728 'nodes': pullop.fetch,
1732 'source': 'pull',
1729 'source': 'pull',
1733 }).result()
1730 }).result()
1734
1731
1735 elif not pullop.remote.capable('changegroupsubset'):
1732 elif not pullop.remote.capable('changegroupsubset'):
1736 raise error.Abort(_("partial pull cannot be done because "
1733 raise error.Abort(_("partial pull cannot be done because "
1737 "other repository doesn't support "
1734 "other repository doesn't support "
1738 "changegroupsubset."))
1735 "changegroupsubset."))
1739 else:
1736 else:
1740 with pullop.remote.commandexecutor() as e:
1737 with pullop.remote.commandexecutor() as e:
1741 cg = e.callcommand('changegroupsubset', {
1738 cg = e.callcommand('changegroupsubset', {
1742 'bases': pullop.fetch,
1739 'bases': pullop.fetch,
1743 'heads': pullop.heads,
1740 'heads': pullop.heads,
1744 'source': 'pull',
1741 'source': 'pull',
1745 }).result()
1742 }).result()
1746
1743
1747 bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
1744 bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
1748 pullop.remote.url())
1745 pullop.remote.url())
1749 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1746 pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
1750
1747
1751 def _pullphase(pullop):
1748 def _pullphase(pullop):
1752 # Get remote phases data from remote
1749 # Get remote phases data from remote
1753 if 'phases' in pullop.stepsdone:
1750 if 'phases' in pullop.stepsdone:
1754 return
1751 return
1755 remotephases = listkeys(pullop.remote, 'phases')
1752 remotephases = listkeys(pullop.remote, 'phases')
1756 _pullapplyphases(pullop, remotephases)
1753 _pullapplyphases(pullop, remotephases)
1757
1754
1758 def _pullapplyphases(pullop, remotephases):
1755 def _pullapplyphases(pullop, remotephases):
1759 """apply phase movement from observed remote state"""
1756 """apply phase movement from observed remote state"""
1760 if 'phases' in pullop.stepsdone:
1757 if 'phases' in pullop.stepsdone:
1761 return
1758 return
1762 pullop.stepsdone.add('phases')
1759 pullop.stepsdone.add('phases')
1763 publishing = bool(remotephases.get('publishing', False))
1760 publishing = bool(remotephases.get('publishing', False))
1764 if remotephases and not publishing:
1761 if remotephases and not publishing:
1765 # remote is new and non-publishing
1762 # remote is new and non-publishing
1766 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1763 pheads, _dr = phases.analyzeremotephases(pullop.repo,
1767 pullop.pulledsubset,
1764 pullop.pulledsubset,
1768 remotephases)
1765 remotephases)
1769 dheads = pullop.pulledsubset
1766 dheads = pullop.pulledsubset
1770 else:
1767 else:
1771 # Remote is old or publishing: all common changesets
1768 # Remote is old or publishing: all common changesets
1772 # should be seen as public
1769 # should be seen as public
1773 pheads = pullop.pulledsubset
1770 pheads = pullop.pulledsubset
1774 dheads = []
1771 dheads = []
1775 unfi = pullop.repo.unfiltered()
1772 unfi = pullop.repo.unfiltered()
1776 phase = unfi._phasecache.phase
1773 phase = unfi._phasecache.phase
1777 rev = unfi.changelog.nodemap.get
1774 rev = unfi.changelog.nodemap.get
1778 public = phases.public
1775 public = phases.public
1779 draft = phases.draft
1776 draft = phases.draft
1780
1777
1781 # exclude changesets already public locally and update the others
1778 # exclude changesets already public locally and update the others
1782 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1779 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
1783 if pheads:
1780 if pheads:
1784 tr = pullop.gettransaction()
1781 tr = pullop.gettransaction()
1785 phases.advanceboundary(pullop.repo, tr, public, pheads)
1782 phases.advanceboundary(pullop.repo, tr, public, pheads)
1786
1783
1787 # exclude changesets already draft locally and update the others
1784 # exclude changesets already draft locally and update the others
1788 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1785 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
1789 if dheads:
1786 if dheads:
1790 tr = pullop.gettransaction()
1787 tr = pullop.gettransaction()
1791 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1788 phases.advanceboundary(pullop.repo, tr, draft, dheads)
1792
1789
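The filtering above boils down to one rule: a head only needs a boundary move when its local phase is still higher (less public) than what the remote reports. A repository-free toy illustration of that rule, using plain integers in place of the real phase constants:

    public, draft = 0, 1                       # same ordering as phases.public/draft
    local_phase = {'n1': public, 'n2': draft}  # hypothetical node -> phase map
    remote_public_heads = ['n1', 'n2']         # heads the remote reports as public

    # only heads that are still draft locally need phases.advanceboundary()
    pheads = [n for n in remote_public_heads if local_phase[n] > public]
    assert pheads == ['n2']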
1793 def _pullbookmarks(pullop):
1790 def _pullbookmarks(pullop):
1794 """process the remote bookmark information to update the local one"""
1791 """process the remote bookmark information to update the local one"""
1795 if 'bookmarks' in pullop.stepsdone:
1792 if 'bookmarks' in pullop.stepsdone:
1796 return
1793 return
1797 pullop.stepsdone.add('bookmarks')
1794 pullop.stepsdone.add('bookmarks')
1798 repo = pullop.repo
1795 repo = pullop.repo
1799 remotebookmarks = pullop.remotebookmarks
1796 remotebookmarks = pullop.remotebookmarks
1800 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1797 bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
1801 pullop.remote.url(),
1798 pullop.remote.url(),
1802 pullop.gettransaction,
1799 pullop.gettransaction,
1803 explicit=pullop.explicitbookmarks)
1800 explicit=pullop.explicitbookmarks)
1804
1801
1805 def _pullobsolete(pullop):
1802 def _pullobsolete(pullop):
1806 """utility function to pull obsolete markers from a remote
1803 """utility function to pull obsolete markers from a remote
1807
1804
1808 The `gettransaction` is a function that returns the pull transaction, creating
1805 The `gettransaction` is a function that returns the pull transaction, creating
1809 one if necessary. We return the transaction to inform the calling code that
1806 one if necessary. We return the transaction to inform the calling code that
1810 a new transaction has been created (when applicable).
1807 a new transaction has been created (when applicable).
1811
1808
1812 Exists mostly to allow overriding for experimentation purposes"""
1809 Exists mostly to allow overriding for experimentation purposes"""
1813 if 'obsmarkers' in pullop.stepsdone:
1810 if 'obsmarkers' in pullop.stepsdone:
1814 return
1811 return
1815 pullop.stepsdone.add('obsmarkers')
1812 pullop.stepsdone.add('obsmarkers')
1816 tr = None
1813 tr = None
1817 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1814 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
1818 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1815 pullop.repo.ui.debug('fetching remote obsolete markers\n')
1819 remoteobs = listkeys(pullop.remote, 'obsolete')
1816 remoteobs = listkeys(pullop.remote, 'obsolete')
1820 if 'dump0' in remoteobs:
1817 if 'dump0' in remoteobs:
1821 tr = pullop.gettransaction()
1818 tr = pullop.gettransaction()
1822 markers = []
1819 markers = []
1823 for key in sorted(remoteobs, reverse=True):
1820 for key in sorted(remoteobs, reverse=True):
1824 if key.startswith('dump'):
1821 if key.startswith('dump'):
1825 data = util.b85decode(remoteobs[key])
1822 data = util.b85decode(remoteobs[key])
1826 version, newmarks = obsolete._readmarkers(data)
1823 version, newmarks = obsolete._readmarkers(data)
1827 markers += newmarks
1824 markers += newmarks
1828 if markers:
1825 if markers:
1829 pullop.repo.obsstore.add(tr, markers)
1826 pullop.repo.obsstore.add(tr, markers)
1830 pullop.repo.invalidatevolatilesets()
1827 pullop.repo.invalidatevolatilesets()
1831 return tr
1828 return tr
1832
1829
1833 def caps20to10(repo, role):
1830 def caps20to10(repo, role):
1834 """return a set with appropriate options to use bundle20 during getbundle"""
1831 """return a set with appropriate options to use bundle20 during getbundle"""
1835 caps = {'HG20'}
1832 caps = {'HG20'}
1836 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
1833 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
1837 caps.add('bundle2=' + urlreq.quote(capsblob))
1834 caps.add('bundle2=' + urlreq.quote(capsblob))
1838 return caps
1835 return caps
1839
1836
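Illustratively, and assuming a repository object and the 'client' role, the returned set contains the plain bundle2 marker plus the URL-quoted capability blob; the exact blob depends on the repository's advertised capabilities:

    caps = caps20to10(repo, role='client')
    # e.g. {'HG20', 'bundle2=HG20%0Achangegroup%3D01%2C02...'}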
1840 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1837 # List of names of steps to perform for a bundle2 for getbundle, order matters.
1841 getbundle2partsorder = []
1838 getbundle2partsorder = []
1842
1839
1843 # Mapping between step name and function
1840 # Mapping between step name and function
1844 #
1841 #
1845 # This exists to help extensions wrap steps if necessary
1842 # This exists to help extensions wrap steps if necessary
1846 getbundle2partsmapping = {}
1843 getbundle2partsmapping = {}
1847
1844
1848 def getbundle2partsgenerator(stepname, idx=None):
1845 def getbundle2partsgenerator(stepname, idx=None):
1849 """decorator for function generating bundle2 part for getbundle
1846 """decorator for function generating bundle2 part for getbundle
1850
1847
1851 The function is added to the step -> function mapping and appended to the
1848 The function is added to the step -> function mapping and appended to the
1852 list of steps. Beware that decorated functions will be added in order
1849 list of steps. Beware that decorated functions will be added in order
1853 (this may matter).
1850 (this may matter).
1854
1851
1855 You can only use this decorator for new steps; if you want to wrap a step
1852 You can only use this decorator for new steps; if you want to wrap a step
1856 from an extension, modify the getbundle2partsmapping dictionary directly."""
1853 from an extension, modify the getbundle2partsmapping dictionary directly."""
1857 def dec(func):
1854 def dec(func):
1858 assert stepname not in getbundle2partsmapping
1855 assert stepname not in getbundle2partsmapping
1859 getbundle2partsmapping[stepname] = func
1856 getbundle2partsmapping[stepname] = func
1860 if idx is None:
1857 if idx is None:
1861 getbundle2partsorder.append(stepname)
1858 getbundle2partsorder.append(stepname)
1862 else:
1859 else:
1863 getbundle2partsorder.insert(idx, stepname)
1860 getbundle2partsorder.insert(idx, stepname)
1864 return func
1861 return func
1865 return dec
1862 return dec
1866
1863
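An extension defining a brand new step would therefore use the decorator directly. A minimal sketch, with a made-up part name ('myext-data') and a made-up request flag ('myextdata'):

    @getbundle2partsgenerator('myext-data')
    def _getbundlemyextpart(bundler, repo, source, bundlecaps=None,
                            b2caps=None, **kwargs):
        """add a hypothetical extension part to the requested bundle"""
        if not kwargs.get(r'myextdata', False):
            return
        bundler.newpart('myext-data', data=b'payload')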
1867 def bundle2requested(bundlecaps):
1864 def bundle2requested(bundlecaps):
1868 if bundlecaps is not None:
1865 if bundlecaps is not None:
1869 return any(cap.startswith('HG2') for cap in bundlecaps)
1866 return any(cap.startswith('HG2') for cap in bundlecaps)
1870 return False
1867 return False
1871
1868
1872 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1869 def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
1873 **kwargs):
1870 **kwargs):
1874 """Return chunks constituting a bundle's raw data.
1871 """Return chunks constituting a bundle's raw data.
1875
1872
1876 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1873 Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
1877 passed.
1874 passed.
1878
1875
1879 Returns a 2-tuple of a dict with metadata about the generated bundle
1876 Returns a 2-tuple of a dict with metadata about the generated bundle
1880 and an iterator over raw chunks (of varying sizes).
1877 and an iterator over raw chunks (of varying sizes).
1881 """
1878 """
1882 kwargs = pycompat.byteskwargs(kwargs)
1879 kwargs = pycompat.byteskwargs(kwargs)
1883 info = {}
1880 info = {}
1884 usebundle2 = bundle2requested(bundlecaps)
1881 usebundle2 = bundle2requested(bundlecaps)
1885 # bundle10 case
1882 # bundle10 case
1886 if not usebundle2:
1883 if not usebundle2:
1887 if bundlecaps and not kwargs.get('cg', True):
1884 if bundlecaps and not kwargs.get('cg', True):
1888 raise ValueError(_('request for bundle10 must include changegroup'))
1885 raise ValueError(_('request for bundle10 must include changegroup'))
1889
1886
1890 if kwargs:
1887 if kwargs:
1891 raise ValueError(_('unsupported getbundle arguments: %s')
1888 raise ValueError(_('unsupported getbundle arguments: %s')
1892 % ', '.join(sorted(kwargs.keys())))
1889 % ', '.join(sorted(kwargs.keys())))
1893 outgoing = _computeoutgoing(repo, heads, common)
1890 outgoing = _computeoutgoing(repo, heads, common)
1894 info['bundleversion'] = 1
1891 info['bundleversion'] = 1
1895 return info, changegroup.makestream(repo, outgoing, '01', source,
1892 return info, changegroup.makestream(repo, outgoing, '01', source,
1896 bundlecaps=bundlecaps)
1893 bundlecaps=bundlecaps)
1897
1894
1898 # bundle20 case
1895 # bundle20 case
1899 info['bundleversion'] = 2
1896 info['bundleversion'] = 2
1900 b2caps = {}
1897 b2caps = {}
1901 for bcaps in bundlecaps:
1898 for bcaps in bundlecaps:
1902 if bcaps.startswith('bundle2='):
1899 if bcaps.startswith('bundle2='):
1903 blob = urlreq.unquote(bcaps[len('bundle2='):])
1900 blob = urlreq.unquote(bcaps[len('bundle2='):])
1904 b2caps.update(bundle2.decodecaps(blob))
1901 b2caps.update(bundle2.decodecaps(blob))
1905 bundler = bundle2.bundle20(repo.ui, b2caps)
1902 bundler = bundle2.bundle20(repo.ui, b2caps)
1906
1903
1907 kwargs['heads'] = heads
1904 kwargs['heads'] = heads
1908 kwargs['common'] = common
1905 kwargs['common'] = common
1909
1906
1910 for name in getbundle2partsorder:
1907 for name in getbundle2partsorder:
1911 func = getbundle2partsmapping[name]
1908 func = getbundle2partsmapping[name]
1912 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1909 func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
1913 **pycompat.strkwargs(kwargs))
1910 **pycompat.strkwargs(kwargs))
1914
1911
1915 info['prefercompressed'] = bundler.prefercompressed
1912 info['prefercompressed'] = bundler.prefercompressed
1916
1913
1917 return info, bundler.getchunks()
1914 return info, bundler.getchunks()
1918
1915
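Callers get back a metadata dict plus an iterator of raw chunks, so the result can be streamed without buffering the whole bundle. A rough consumer sketch, assuming a repository object is already in hand and using a bare 'HG20' capability set (real clients send their full 'bundle2=' blob):

    caps = {'HG20'}  # pretend the client advertised plain bundle2 support
    info, chunks = getbundlechunks(repo, 'serve', heads=None, common=None,
                                   bundlecaps=caps)
    # info['bundleversion'] will be 2 here
    with open('bundle.hg', 'wb') as fh:
        for chunk in chunks:
            fh.write(chunk)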
1919 @getbundle2partsgenerator('stream2')
1916 @getbundle2partsgenerator('stream2')
1920 def _getbundlestream2(bundler, repo, *args, **kwargs):
1917 def _getbundlestream2(bundler, repo, *args, **kwargs):
1921 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
1918 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
1922
1919
1923 @getbundle2partsgenerator('changegroup')
1920 @getbundle2partsgenerator('changegroup')
1924 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1921 def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
1925 b2caps=None, heads=None, common=None, **kwargs):
1922 b2caps=None, heads=None, common=None, **kwargs):
1926 """add a changegroup part to the requested bundle"""
1923 """add a changegroup part to the requested bundle"""
1927 cgstream = None
1924 cgstream = None
1928 if kwargs.get(r'cg', True):
1925 if kwargs.get(r'cg', True):
1929 # build changegroup bundle here.
1926 # build changegroup bundle here.
1930 version = '01'
1927 version = '01'
1931 cgversions = b2caps.get('changegroup')
1928 cgversions = b2caps.get('changegroup')
1932 if cgversions: # 3.1 and 3.2 ship with an empty value
1929 if cgversions: # 3.1 and 3.2 ship with an empty value
1933 cgversions = [v for v in cgversions
1930 cgversions = [v for v in cgversions
1934 if v in changegroup.supportedoutgoingversions(repo)]
1931 if v in changegroup.supportedoutgoingversions(repo)]
1935 if not cgversions:
1932 if not cgversions:
1936 raise ValueError(_('no common changegroup version'))
1933 raise ValueError(_('no common changegroup version'))
1937 version = max(cgversions)
1934 version = max(cgversions)
1938 outgoing = _computeoutgoing(repo, heads, common)
1935 outgoing = _computeoutgoing(repo, heads, common)
1939 if outgoing.missing:
1936 if outgoing.missing:
1940 cgstream = changegroup.makestream(repo, outgoing, version, source,
1937 cgstream = changegroup.makestream(repo, outgoing, version, source,
1941 bundlecaps=bundlecaps)
1938 bundlecaps=bundlecaps)
1942
1939
1943 if cgstream:
1940 if cgstream:
1944 part = bundler.newpart('changegroup', data=cgstream)
1941 part = bundler.newpart('changegroup', data=cgstream)
1945 if cgversions:
1942 if cgversions:
1946 part.addparam('version', version)
1943 part.addparam('version', version)
1947 part.addparam('nbchanges', '%d' % len(outgoing.missing),
1944 part.addparam('nbchanges', '%d' % len(outgoing.missing),
1948 mandatory=False)
1945 mandatory=False)
1949 if 'treemanifest' in repo.requirements:
1946 if 'treemanifest' in repo.requirements:
1950 part.addparam('treemanifest', '1')
1947 part.addparam('treemanifest', '1')
1951
1948
1952 @getbundle2partsgenerator('bookmarks')
1949 @getbundle2partsgenerator('bookmarks')
1953 def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
1950 def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
1954 b2caps=None, **kwargs):
1951 b2caps=None, **kwargs):
1955 """add a bookmark part to the requested bundle"""
1952 """add a bookmark part to the requested bundle"""
1956 if not kwargs.get(r'bookmarks', False):
1953 if not kwargs.get(r'bookmarks', False):
1957 return
1954 return
1958 if 'bookmarks' not in b2caps:
1955 if 'bookmarks' not in b2caps:
1959 raise ValueError(_('no common bookmarks exchange method'))
1956 raise ValueError(_('no common bookmarks exchange method'))
1960 books = bookmod.listbinbookmarks(repo)
1957 books = bookmod.listbinbookmarks(repo)
1961 data = bookmod.binaryencode(books)
1958 data = bookmod.binaryencode(books)
1962 if data:
1959 if data:
1963 bundler.newpart('bookmarks', data=data)
1960 bundler.newpart('bookmarks', data=data)
1964
1961
1965 @getbundle2partsgenerator('listkeys')
1962 @getbundle2partsgenerator('listkeys')
1966 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1963 def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
1967 b2caps=None, **kwargs):
1964 b2caps=None, **kwargs):
1968 """add parts containing listkeys namespaces to the requested bundle"""
1965 """add parts containing listkeys namespaces to the requested bundle"""
1969 listkeys = kwargs.get(r'listkeys', ())
1966 listkeys = kwargs.get(r'listkeys', ())
1970 for namespace in listkeys:
1967 for namespace in listkeys:
1971 part = bundler.newpart('listkeys')
1968 part = bundler.newpart('listkeys')
1972 part.addparam('namespace', namespace)
1969 part.addparam('namespace', namespace)
1973 keys = repo.listkeys(namespace).items()
1970 keys = repo.listkeys(namespace).items()
1974 part.data = pushkey.encodekeys(keys)
1971 part.data = pushkey.encodekeys(keys)
1975
1972
1976 @getbundle2partsgenerator('obsmarkers')
1973 @getbundle2partsgenerator('obsmarkers')
1977 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1974 def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
1978 b2caps=None, heads=None, **kwargs):
1975 b2caps=None, heads=None, **kwargs):
1979 """add an obsolescence markers part to the requested bundle"""
1976 """add an obsolescence markers part to the requested bundle"""
1980 if kwargs.get(r'obsmarkers', False):
1977 if kwargs.get(r'obsmarkers', False):
1981 if heads is None:
1978 if heads is None:
1982 heads = repo.heads()
1979 heads = repo.heads()
1983 subset = [c.node() for c in repo.set('::%ln', heads)]
1980 subset = [c.node() for c in repo.set('::%ln', heads)]
1984 markers = repo.obsstore.relevantmarkers(subset)
1981 markers = repo.obsstore.relevantmarkers(subset)
1985 markers = sorted(markers)
1982 markers = sorted(markers)
1986 bundle2.buildobsmarkerspart(bundler, markers)
1983 bundle2.buildobsmarkerspart(bundler, markers)
1987
1984
1988 @getbundle2partsgenerator('phases')
1985 @getbundle2partsgenerator('phases')
1989 def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
1986 def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
1990 b2caps=None, heads=None, **kwargs):
1987 b2caps=None, heads=None, **kwargs):
1991 """add phase heads part to the requested bundle"""
1988 """add phase heads part to the requested bundle"""
1992 if kwargs.get(r'phases', False):
1989 if kwargs.get(r'phases', False):
1993 if not 'heads' in b2caps.get('phases'):
1990 if not 'heads' in b2caps.get('phases'):
1994 raise ValueError(_('no common phases exchange method'))
1991 raise ValueError(_('no common phases exchange method'))
1995 if heads is None:
1992 if heads is None:
1996 heads = repo.heads()
1993 heads = repo.heads()
1997
1994
1998 headsbyphase = collections.defaultdict(set)
1995 headsbyphase = collections.defaultdict(set)
1999 if repo.publishing():
1996 if repo.publishing():
2000 headsbyphase[phases.public] = heads
1997 headsbyphase[phases.public] = heads
2001 else:
1998 else:
2002 # find the appropriate heads to move
1999 # find the appropriate heads to move
2003
2000
2004 phase = repo._phasecache.phase
2001 phase = repo._phasecache.phase
2005 node = repo.changelog.node
2002 node = repo.changelog.node
2006 rev = repo.changelog.rev
2003 rev = repo.changelog.rev
2007 for h in heads:
2004 for h in heads:
2008 headsbyphase[phase(repo, rev(h))].add(h)
2005 headsbyphase[phase(repo, rev(h))].add(h)
2009 seenphases = list(headsbyphase.keys())
2006 seenphases = list(headsbyphase.keys())
2010
2007
2011 # We do not handle anything but public and draft phases for now
2008 # We do not handle anything but public and draft phases for now
2012 if seenphases:
2009 if seenphases:
2013 assert max(seenphases) <= phases.draft
2010 assert max(seenphases) <= phases.draft
2014
2011
2015 # if client is pulling non-public changesets, we need to find
2012 # if client is pulling non-public changesets, we need to find
2016 # intermediate public heads.
2013 # intermediate public heads.
2017 draftheads = headsbyphase.get(phases.draft, set())
2014 draftheads = headsbyphase.get(phases.draft, set())
2018 if draftheads:
2015 if draftheads:
2019 publicheads = headsbyphase.get(phases.public, set())
2016 publicheads = headsbyphase.get(phases.public, set())
2020
2017
2021 revset = 'heads(only(%ln, %ln) and public())'
2018 revset = 'heads(only(%ln, %ln) and public())'
2022 extraheads = repo.revs(revset, draftheads, publicheads)
2019 extraheads = repo.revs(revset, draftheads, publicheads)
2023 for r in extraheads:
2020 for r in extraheads:
2024 headsbyphase[phases.public].add(node(r))
2021 headsbyphase[phases.public].add(node(r))
2025
2022
2026 # transform data in a format used by the encoding function
2023 # transform data in a format used by the encoding function
2027 phasemapping = []
2024 phasemapping = []
2028 for phase in phases.allphases:
2025 for phase in phases.allphases:
2029 phasemapping.append(sorted(headsbyphase[phase]))
2026 phasemapping.append(sorted(headsbyphase[phase]))
2030
2027
2031 # generate the actual part
2028 # generate the actual part
2032 phasedata = phases.binaryencode(phasemapping)
2029 phasedata = phases.binaryencode(phasemapping)
2033 bundler.newpart('phase-heads', data=phasedata)
2030 bundler.newpart('phase-heads', data=phasedata)
2034
2031
2035 @getbundle2partsgenerator('hgtagsfnodes')
2032 @getbundle2partsgenerator('hgtagsfnodes')
2036 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
2033 def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
2037 b2caps=None, heads=None, common=None,
2034 b2caps=None, heads=None, common=None,
2038 **kwargs):
2035 **kwargs):
2039 """Transfer the .hgtags filenodes mapping.
2036 """Transfer the .hgtags filenodes mapping.
2040
2037
2041 Only values for heads in this bundle will be transferred.
2038 Only values for heads in this bundle will be transferred.
2042
2039
2043 The part data consists of pairs of 20 byte changeset node and .hgtags
2040 The part data consists of pairs of 20 byte changeset node and .hgtags
2044 filenodes raw values.
2041 filenodes raw values.
2045 """
2042 """
2046 # Don't send unless:
2043 # Don't send unless:
2047 # - changesets are being exchanged,
2044 # - changesets are being exchanged,
2048 # - the client supports it.
2045 # - the client supports it.
2049 if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
2046 if not (kwargs.get(r'cg', True) and 'hgtagsfnodes' in b2caps):
2050 return
2047 return
2051
2048
2052 outgoing = _computeoutgoing(repo, heads, common)
2049 outgoing = _computeoutgoing(repo, heads, common)
2053 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2050 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2054
2051
2055 @getbundle2partsgenerator('cache:rev-branch-cache')
2052 @getbundle2partsgenerator('cache:rev-branch-cache')
2056 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
2053 def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
2057 b2caps=None, heads=None, common=None,
2054 b2caps=None, heads=None, common=None,
2058 **kwargs):
2055 **kwargs):
2059 """Transfer the rev-branch-cache mapping
2056 """Transfer the rev-branch-cache mapping
2060
2057
2061 The payload is a series of data related to each branch
2058 The payload is a series of data related to each branch
2062
2059
2063 1) branch name length
2060 1) branch name length
2064 2) number of open heads
2061 2) number of open heads
2065 3) number of closed heads
2062 3) number of closed heads
2066 4) open heads nodes
2063 4) open heads nodes
2067 5) closed heads nodes
2064 5) closed heads nodes
2068 """
2065 """
2069 # Don't send unless:
2066 # Don't send unless:
2070 # - changesets are being exchanged,
2067 # - changesets are being exchanged,
2071 # - the client supports it.
2068 # - the client supports it.
2072 if not (kwargs.get(r'cg', True)) or 'rev-branch-cache' not in b2caps:
2069 if not (kwargs.get(r'cg', True)) or 'rev-branch-cache' not in b2caps:
2073 return
2070 return
2074 outgoing = _computeoutgoing(repo, heads, common)
2071 outgoing = _computeoutgoing(repo, heads, common)
2075 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2072 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2076
2073
2077 def check_heads(repo, their_heads, context):
2074 def check_heads(repo, their_heads, context):
2078 """check if the heads of a repo have been modified
2075 """check if the heads of a repo have been modified
2079
2076
2080 Used by peer for unbundling.
2077 Used by peer for unbundling.
2081 """
2078 """
2082 heads = repo.heads()
2079 heads = repo.heads()
2083 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
2080 heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
2084 if not (their_heads == ['force'] or their_heads == heads or
2081 if not (their_heads == ['force'] or their_heads == heads or
2085 their_heads == ['hashed', heads_hash]):
2082 their_heads == ['hashed', heads_hash]):
2086 # someone else committed/pushed/unbundled while we
2083 # someone else committed/pushed/unbundled while we
2087 # were transferring data
2084 # were transferring data
2088 raise error.PushRaced('repository changed while %s - '
2085 raise error.PushRaced('repository changed while %s - '
2089 'please try again' % context)
2086 'please try again' % context)
2090
2087
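The 'hashed' form lets a client pin the heads it saw during discovery without sending all of them. A sketch of how the matching value could be built on the sending side, mirroring the construction above (`remote.heads()` stands for whatever node list discovery produced):

    import hashlib

    observed = sorted(remote.heads())   # 20-byte nodes seen during discovery
    their_heads = ['hashed', hashlib.sha1(''.join(observed)).digest()]
    # unbundle() then only succeeds if the server-side heads still hash to
    # the same value, otherwise PushRaced is raised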
2091 def unbundle(repo, cg, heads, source, url):
2088 def unbundle(repo, cg, heads, source, url):
2092 """Apply a bundle to a repo.
2089 """Apply a bundle to a repo.
2093
2090
2094 This function makes sure the repo is locked during the application and has a
2091 This function makes sure the repo is locked during the application and has a
2095 mechanism to check that no push race occurred between the creation of the
2092 mechanism to check that no push race occurred between the creation of the
2096 bundle and its application.
2093 bundle and its application.
2097
2094
2098 If the push was raced, a PushRaced exception is raised."""
2095 If the push was raced, a PushRaced exception is raised."""
2099 r = 0
2096 r = 0
2100 # need a transaction when processing a bundle2 stream
2097 # need a transaction when processing a bundle2 stream
2101 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2098 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2102 lockandtr = [None, None, None]
2099 lockandtr = [None, None, None]
2103 recordout = None
2100 recordout = None
2104 # quick fix for output mismatch with bundle2 in 3.4
2101 # quick fix for output mismatch with bundle2 in 3.4
2105 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
2102 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture')
2106 if url.startswith('remote:http:') or url.startswith('remote:https:'):
2103 if url.startswith('remote:http:') or url.startswith('remote:https:'):
2107 captureoutput = True
2104 captureoutput = True
2108 try:
2105 try:
2109 # note: outside bundle1, 'heads' is expected to be empty and this
2106 # note: outside bundle1, 'heads' is expected to be empty and this
2110 # 'check_heads' call will be a no-op
2107 # 'check_heads' call will be a no-op
2111 check_heads(repo, heads, 'uploading changes')
2108 check_heads(repo, heads, 'uploading changes')
2112 # push can proceed
2109 # push can proceed
2113 if not isinstance(cg, bundle2.unbundle20):
2110 if not isinstance(cg, bundle2.unbundle20):
2114 # legacy case: bundle1 (changegroup 01)
2111 # legacy case: bundle1 (changegroup 01)
2115 txnname = "\n".join([source, util.hidepassword(url)])
2112 txnname = "\n".join([source, util.hidepassword(url)])
2116 with repo.lock(), repo.transaction(txnname) as tr:
2113 with repo.lock(), repo.transaction(txnname) as tr:
2117 op = bundle2.applybundle(repo, cg, tr, source, url)
2114 op = bundle2.applybundle(repo, cg, tr, source, url)
2118 r = bundle2.combinechangegroupresults(op)
2115 r = bundle2.combinechangegroupresults(op)
2119 else:
2116 else:
2120 r = None
2117 r = None
2121 try:
2118 try:
2122 def gettransaction():
2119 def gettransaction():
2123 if not lockandtr[2]:
2120 if not lockandtr[2]:
2124 lockandtr[0] = repo.wlock()
2121 lockandtr[0] = repo.wlock()
2125 lockandtr[1] = repo.lock()
2122 lockandtr[1] = repo.lock()
2126 lockandtr[2] = repo.transaction(source)
2123 lockandtr[2] = repo.transaction(source)
2127 lockandtr[2].hookargs['source'] = source
2124 lockandtr[2].hookargs['source'] = source
2128 lockandtr[2].hookargs['url'] = url
2125 lockandtr[2].hookargs['url'] = url
2129 lockandtr[2].hookargs['bundle2'] = '1'
2126 lockandtr[2].hookargs['bundle2'] = '1'
2130 return lockandtr[2]
2127 return lockandtr[2]
2131
2128
2132 # Do greedy locking by default until we're satisfied with lazy
2129 # Do greedy locking by default until we're satisfied with lazy
2133 # locking.
2130 # locking.
2134 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2131 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
2135 gettransaction()
2132 gettransaction()
2136
2133
2137 op = bundle2.bundleoperation(repo, gettransaction,
2134 op = bundle2.bundleoperation(repo, gettransaction,
2138 captureoutput=captureoutput,
2135 captureoutput=captureoutput,
2139 source='push')
2136 source='push')
2140 try:
2137 try:
2141 op = bundle2.processbundle(repo, cg, op=op)
2138 op = bundle2.processbundle(repo, cg, op=op)
2142 finally:
2139 finally:
2143 r = op.reply
2140 r = op.reply
2144 if captureoutput and r is not None:
2141 if captureoutput and r is not None:
2145 repo.ui.pushbuffer(error=True, subproc=True)
2142 repo.ui.pushbuffer(error=True, subproc=True)
2146 def recordout(output):
2143 def recordout(output):
2147 r.newpart('output', data=output, mandatory=False)
2144 r.newpart('output', data=output, mandatory=False)
2148 if lockandtr[2] is not None:
2145 if lockandtr[2] is not None:
2149 lockandtr[2].close()
2146 lockandtr[2].close()
2150 except BaseException as exc:
2147 except BaseException as exc:
2151 exc.duringunbundle2 = True
2148 exc.duringunbundle2 = True
2152 if captureoutput and r is not None:
2149 if captureoutput and r is not None:
2153 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2150 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2154 def recordout(output):
2151 def recordout(output):
2155 part = bundle2.bundlepart('output', data=output,
2152 part = bundle2.bundlepart('output', data=output,
2156 mandatory=False)
2153 mandatory=False)
2157 parts.append(part)
2154 parts.append(part)
2158 raise
2155 raise
2159 finally:
2156 finally:
2160 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2157 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2161 if recordout is not None:
2158 if recordout is not None:
2162 recordout(repo.ui.popbuffer())
2159 recordout(repo.ui.popbuffer())
2163 return r
2160 return r
2164
2161
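The `lockandtr` list and the nested `gettransaction` above implement the lazy-locking idea: nothing is locked until a bundle2 part actually asks for a transaction. The same pattern in isolation, with hypothetical `acquire_*`/`open_transaction` helpers standing in for `repo.wlock()`, `repo.lock()` and `repo.transaction()`:

    state = [None, None, None]            # [wlock, lock, transaction]

    def gettransaction():
        if not state[2]:                  # the first part that needs it pays the cost
            state[0] = acquire_wlock()    # hypothetical stand-ins for repo.wlock(),
            state[1] = acquire_lock()     # repo.lock() and repo.transaction(source)
            state[2] = open_transaction()
        return state[2]

    # hand gettransaction to the bundle processor; release everything in a
    # finally block, as the code above does with lockmod.release()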
2165 def _maybeapplyclonebundle(pullop):
2162 def _maybeapplyclonebundle(pullop):
2166 """Apply a clone bundle from a remote, if possible."""
2163 """Apply a clone bundle from a remote, if possible."""
2167
2164
2168 repo = pullop.repo
2165 repo = pullop.repo
2169 remote = pullop.remote
2166 remote = pullop.remote
2170
2167
2171 if not repo.ui.configbool('ui', 'clonebundles'):
2168 if not repo.ui.configbool('ui', 'clonebundles'):
2172 return
2169 return
2173
2170
2174 # Only run if local repo is empty.
2171 # Only run if local repo is empty.
2175 if len(repo):
2172 if len(repo):
2176 return
2173 return
2177
2174
2178 if pullop.heads:
2175 if pullop.heads:
2179 return
2176 return
2180
2177
2181 if not remote.capable('clonebundles'):
2178 if not remote.capable('clonebundles'):
2182 return
2179 return
2183
2180
2184 with remote.commandexecutor() as e:
2181 with remote.commandexecutor() as e:
2185 res = e.callcommand('clonebundles', {}).result()
2182 res = e.callcommand('clonebundles', {}).result()
2186
2183
2187 # If we call the wire protocol command, that's good enough to record the
2184 # If we call the wire protocol command, that's good enough to record the
2188 # attempt.
2185 # attempt.
2189 pullop.clonebundleattempted = True
2186 pullop.clonebundleattempted = True
2190
2187
2191 entries = parseclonebundlesmanifest(repo, res)
2188 entries = parseclonebundlesmanifest(repo, res)
2192 if not entries:
2189 if not entries:
2193 repo.ui.note(_('no clone bundles available on remote; '
2190 repo.ui.note(_('no clone bundles available on remote; '
2194 'falling back to regular clone\n'))
2191 'falling back to regular clone\n'))
2195 return
2192 return
2196
2193
2197 entries = filterclonebundleentries(
2194 entries = filterclonebundleentries(
2198 repo, entries, streamclonerequested=pullop.streamclonerequested)
2195 repo, entries, streamclonerequested=pullop.streamclonerequested)
2199
2196
2200 if not entries:
2197 if not entries:
2201 # There is a thundering herd concern here. However, if a server
2198 # There is a thundering herd concern here. However, if a server
2202 # operator doesn't advertise bundles appropriate for its clients,
2199 # operator doesn't advertise bundles appropriate for its clients,
2203 # they deserve what's coming. Furthermore, from a client's
2200 # they deserve what's coming. Furthermore, from a client's
2204 # perspective, no automatic fallback would mean not being able to
2201 # perspective, no automatic fallback would mean not being able to
2205 # clone!
2202 # clone!
2206 repo.ui.warn(_('no compatible clone bundles available on server; '
2203 repo.ui.warn(_('no compatible clone bundles available on server; '
2207 'falling back to regular clone\n'))
2204 'falling back to regular clone\n'))
2208 repo.ui.warn(_('(you may want to report this to the server '
2205 repo.ui.warn(_('(you may want to report this to the server '
2209 'operator)\n'))
2206 'operator)\n'))
2210 return
2207 return
2211
2208
2212 entries = sortclonebundleentries(repo.ui, entries)
2209 entries = sortclonebundleentries(repo.ui, entries)
2213
2210
2214 url = entries[0]['URL']
2211 url = entries[0]['URL']
2215 repo.ui.status(_('applying clone bundle from %s\n') % url)
2212 repo.ui.status(_('applying clone bundle from %s\n') % url)
2216 if trypullbundlefromurl(repo.ui, repo, url):
2213 if trypullbundlefromurl(repo.ui, repo, url):
2217 repo.ui.status(_('finished applying clone bundle\n'))
2214 repo.ui.status(_('finished applying clone bundle\n'))
2218 # Bundle failed.
2215 # Bundle failed.
2219 #
2216 #
2220 # We abort by default to avoid the thundering herd of
2217 # We abort by default to avoid the thundering herd of
2221 # clients flooding a server that was expecting expensive
2218 # clients flooding a server that was expecting expensive
2222 # clone load to be offloaded.
2219 # clone load to be offloaded.
2223 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2220 elif repo.ui.configbool('ui', 'clonebundlefallback'):
2224 repo.ui.warn(_('falling back to normal clone\n'))
2221 repo.ui.warn(_('falling back to normal clone\n'))
2225 else:
2222 else:
2226 raise error.Abort(_('error applying bundle'),
2223 raise error.Abort(_('error applying bundle'),
2227 hint=_('if this error persists, consider contacting '
2224 hint=_('if this error persists, consider contacting '
2228 'the server operator or disable clone '
2225 'the server operator or disable clone '
2229 'bundles via '
2226 'bundles via '
2230 '"--config ui.clonebundles=false"'))
2227 '"--config ui.clonebundles=false"'))
2231
2228
2232 def parseclonebundlesmanifest(repo, s):
2229 def parseclonebundlesmanifest(repo, s):
2233 """Parses the raw text of a clone bundles manifest.
2230 """Parses the raw text of a clone bundles manifest.
2234
2231
2235 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2232 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2236 to the URL and other keys are the attributes for the entry.
2233 to the URL and other keys are the attributes for the entry.
2237 """
2234 """
2238 m = []
2235 m = []
2239 for line in s.splitlines():
2236 for line in s.splitlines():
2240 fields = line.split()
2237 fields = line.split()
2241 if not fields:
2238 if not fields:
2242 continue
2239 continue
2243 attrs = {'URL': fields[0]}
2240 attrs = {'URL': fields[0]}
2244 for rawattr in fields[1:]:
2241 for rawattr in fields[1:]:
2245 key, value = rawattr.split('=', 1)
2242 key, value = rawattr.split('=', 1)
2246 key = urlreq.unquote(key)
2243 key = urlreq.unquote(key)
2247 value = urlreq.unquote(value)
2244 value = urlreq.unquote(value)
2248 attrs[key] = value
2245 attrs[key] = value
2249
2246
2250 # Parse BUNDLESPEC into components. This makes client-side
2247 # Parse BUNDLESPEC into components. This makes client-side
2251 # preferences easier to specify since you can prefer a single
2248 # preferences easier to specify since you can prefer a single
2252 # component of the BUNDLESPEC.
2249 # component of the BUNDLESPEC.
2253 if key == 'BUNDLESPEC':
2250 if key == 'BUNDLESPEC':
2254 try:
2251 try:
2255 bundlespec = parsebundlespec(repo, value)
2252 bundlespec = parsebundlespec(repo, value)
2256 attrs['COMPRESSION'] = bundlespec.compression
2253 attrs['COMPRESSION'] = bundlespec.compression
2257 attrs['VERSION'] = bundlespec.version
2254 attrs['VERSION'] = bundlespec.version
2258 except error.InvalidBundleSpecification:
2255 except error.InvalidBundleSpecification:
2259 pass
2256 pass
2260 except error.UnsupportedBundleSpecification:
2257 except error.UnsupportedBundleSpecification:
2261 pass
2258 pass
2262
2259
2263 m.append(attrs)
2260 m.append(attrs)
2264
2261
2265 return m
2262 return m
2266
2263
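A clone bundles manifest is one URL per line, followed by optional percent-encoded key=value attributes. A made-up manifest and the rough shape of what the parser returns for it (COMPRESSION/VERSION only appear when the BUNDLESPEC parses):

    manifest = ('https://cdn.example.com/full.zstd.hg BUNDLESPEC=zstd-v2\n'
                'https://cdn.example.com/full.gzip.hg BUNDLESPEC=gzip-v2 REQUIRESNI=true\n')
    entries = parseclonebundlesmanifest(repo, manifest)
    # roughly: [{'URL': 'https://cdn.example.com/full.zstd.hg',
    #            'BUNDLESPEC': 'zstd-v2', 'COMPRESSION': 'zstd', 'VERSION': 'v2'},
    #           {'URL': 'https://cdn.example.com/full.gzip.hg', ...}]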
2267 def isstreamclonespec(bundlespec):
2264 def isstreamclonespec(bundlespec):
2268 # Stream clone v1
2265 # Stream clone v1
2269 if (bundlespec.wirecompression == 'UN' and bundlespec.wireversion == 's1'):
2266 if (bundlespec.wirecompression == 'UN' and bundlespec.wireversion == 's1'):
2270 return True
2267 return True
2271
2268
2272 # Stream clone v2
2269 # Stream clone v2
2273 if (bundlespec.wirecompression == 'UN' and \
2270 if (bundlespec.wirecompression == 'UN' and \
2274 bundlespec.wireversion == '02' and \
2271 bundlespec.wireversion == '02' and \
2275 bundlespec.contentopts.get('streamv2')):
2272 bundlespec.contentopts.get('streamv2')):
2276 return True
2273 return True
2277
2274
2278 return False
2275 return False
2279
2276
2280 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2277 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2281 """Remove incompatible clone bundle manifest entries.
2278 """Remove incompatible clone bundle manifest entries.
2282
2279
2283 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2280 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2284 and returns a new list consisting of only the entries that this client
2281 and returns a new list consisting of only the entries that this client
2285 should be able to apply.
2282 should be able to apply.
2286
2283
2287 There is no guarantee we'll be able to apply all returned entries because
2284 There is no guarantee we'll be able to apply all returned entries because
2288 the metadata we use to filter on may be missing or wrong.
2285 the metadata we use to filter on may be missing or wrong.
2289 """
2286 """
2290 newentries = []
2287 newentries = []
2291 for entry in entries:
2288 for entry in entries:
2292 spec = entry.get('BUNDLESPEC')
2289 spec = entry.get('BUNDLESPEC')
2293 if spec:
2290 if spec:
2294 try:
2291 try:
2295 bundlespec = parsebundlespec(repo, spec, strict=True)
2292 bundlespec = parsebundlespec(repo, spec, strict=True)
2296
2293
2297 # If a stream clone was requested, filter out non-streamclone
2294 # If a stream clone was requested, filter out non-streamclone
2298 # entries.
2295 # entries.
2299 if streamclonerequested and not isstreamclonespec(bundlespec):
2296 if streamclonerequested and not isstreamclonespec(bundlespec):
2300 repo.ui.debug('filtering %s because not a stream clone\n' %
2297 repo.ui.debug('filtering %s because not a stream clone\n' %
2301 entry['URL'])
2298 entry['URL'])
2302 continue
2299 continue
2303
2300
2304 except error.InvalidBundleSpecification as e:
2301 except error.InvalidBundleSpecification as e:
2305 repo.ui.debug(stringutil.forcebytestr(e) + '\n')
2302 repo.ui.debug(stringutil.forcebytestr(e) + '\n')
2306 continue
2303 continue
2307 except error.UnsupportedBundleSpecification as e:
2304 except error.UnsupportedBundleSpecification as e:
2308 repo.ui.debug('filtering %s because unsupported bundle '
2305 repo.ui.debug('filtering %s because unsupported bundle '
2309 'spec: %s\n' % (
2306 'spec: %s\n' % (
2310 entry['URL'], stringutil.forcebytestr(e)))
2307 entry['URL'], stringutil.forcebytestr(e)))
2311 continue
2308 continue
2312 # If we don't have a spec and requested a stream clone, we don't know
2309 # If we don't have a spec and requested a stream clone, we don't know
2313 # what the entry is so don't attempt to apply it.
2310 # what the entry is so don't attempt to apply it.
2314 elif streamclonerequested:
2311 elif streamclonerequested:
2315 repo.ui.debug('filtering %s because cannot determine if a stream '
2312 repo.ui.debug('filtering %s because cannot determine if a stream '
2316 'clone bundle\n' % entry['URL'])
2313 'clone bundle\n' % entry['URL'])
2317 continue
2314 continue
2318
2315
2319 if 'REQUIRESNI' in entry and not sslutil.hassni:
2316 if 'REQUIRESNI' in entry and not sslutil.hassni:
2320 repo.ui.debug('filtering %s because SNI not supported\n' %
2317 repo.ui.debug('filtering %s because SNI not supported\n' %
2321 entry['URL'])
2318 entry['URL'])
2322 continue
2319 continue
2323
2320
2324 newentries.append(entry)
2321 newentries.append(entry)
2325
2322
2326 return newentries
2323 return newentries
2327
2324
2328 class clonebundleentry(object):
2325 class clonebundleentry(object):
2329 """Represents an item in a clone bundles manifest.
2326 """Represents an item in a clone bundles manifest.
2330
2327
2331 This rich class is needed to support sorting since sorted() in Python 3
2328 This rich class is needed to support sorting since sorted() in Python 3
2332 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
2329 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
2333 won't work.
2330 won't work.
2334 """
2331 """
2335
2332
2336 def __init__(self, value, prefers):
2333 def __init__(self, value, prefers):
2337 self.value = value
2334 self.value = value
2338 self.prefers = prefers
2335 self.prefers = prefers
2339
2336
2340 def _cmp(self, other):
2337 def _cmp(self, other):
2341 for prefkey, prefvalue in self.prefers:
2338 for prefkey, prefvalue in self.prefers:
2342 avalue = self.value.get(prefkey)
2339 avalue = self.value.get(prefkey)
2343 bvalue = other.value.get(prefkey)
2340 bvalue = other.value.get(prefkey)
2344
2341
2345 # Special case for b missing attribute and a matches exactly.
2342 # Special case for b missing attribute and a matches exactly.
2346 if avalue is not None and bvalue is None and avalue == prefvalue:
2343 if avalue is not None and bvalue is None and avalue == prefvalue:
2347 return -1
2344 return -1
2348
2345
2349 # Special case for a missing attribute and b matches exactly.
2346 # Special case for a missing attribute and b matches exactly.
2350 if bvalue is not None and avalue is None and bvalue == prefvalue:
2347 if bvalue is not None and avalue is None and bvalue == prefvalue:
2351 return 1
2348 return 1
2352
2349
2353 # We can't compare unless attribute present on both.
2350 # We can't compare unless attribute present on both.
2354 if avalue is None or bvalue is None:
2351 if avalue is None or bvalue is None:
2355 continue
2352 continue
2356
2353
2357 # Same values should fall back to next attribute.
2354 # Same values should fall back to next attribute.
2358 if avalue == bvalue:
2355 if avalue == bvalue:
2359 continue
2356 continue
2360
2357
2361 # Exact matches come first.
2358 # Exact matches come first.
2362 if avalue == prefvalue:
2359 if avalue == prefvalue:
2363 return -1
2360 return -1
2364 if bvalue == prefvalue:
2361 if bvalue == prefvalue:
2365 return 1
2362 return 1
2366
2363
2367 # Fall back to next attribute.
2364 # Fall back to next attribute.
2368 continue
2365 continue
2369
2366
2370 # If we got here we couldn't sort by attributes and prefers. Fall
2367 # If we got here we couldn't sort by attributes and prefers. Fall
2371 # back to index order.
2368 # back to index order.
2372 return 0
2369 return 0
2373
2370
2374 def __lt__(self, other):
2371 def __lt__(self, other):
2375 return self._cmp(other) < 0
2372 return self._cmp(other) < 0
2376
2373
2377 def __gt__(self, other):
2374 def __gt__(self, other):
2378 return self._cmp(other) > 0
2375 return self._cmp(other) > 0
2379
2376
2380 def __eq__(self, other):
2377 def __eq__(self, other):
2381 return self._cmp(other) == 0
2378 return self._cmp(other) == 0
2382
2379
2383 def __le__(self, other):
2380 def __le__(self, other):
2384 return self._cmp(other) <= 0
2381 return self._cmp(other) <= 0
2385
2382
2386 def __ge__(self, other):
2383 def __ge__(self, other):
2387 return self._cmp(other) >= 0
2384 return self._cmp(other) >= 0
2388
2385
2389 def __ne__(self, other):
2386 def __ne__(self, other):
2390 return self._cmp(other) != 0
2387 return self._cmp(other) != 0
2391
2388
2392 def sortclonebundleentries(ui, entries):
2389 def sortclonebundleentries(ui, entries):
2393 prefers = ui.configlist('ui', 'clonebundleprefers')
2390 prefers = ui.configlist('ui', 'clonebundleprefers')
2394 if not prefers:
2391 if not prefers:
2395 return list(entries)
2392 return list(entries)
2396
2393
2397 prefers = [p.split('=', 1) for p in prefers]
2394 prefers = [p.split('=', 1) for p in prefers]
2398
2395
2399 items = sorted(clonebundleentry(v, prefers) for v in entries)
2396 items = sorted(clonebundleentry(v, prefers) for v in entries)
2400 return [i.value for i in items]
2397 return [i.value for i in items]
2401
2398
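The preference list comes straight from the `ui.clonebundleprefers` config, each item split once on '='. For example, with preferences of COMPRESSION=zstd then VERSION=v2, two hypothetical entries would sort like this:

    prefers = [['COMPRESSION', 'zstd'], ['VERSION', 'v2']]
    entries = [{'URL': 'a', 'COMPRESSION': 'gzip', 'VERSION': 'v2'},
               {'URL': 'b', 'COMPRESSION': 'zstd', 'VERSION': 'v2'}]
    items = sorted(clonebundleentry(v, prefers) for v in entries)
    assert [i.value['URL'] for i in items] == ['b', 'a']   # the zstd entry sorts first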
2402 def trypullbundlefromurl(ui, repo, url):
2399 def trypullbundlefromurl(ui, repo, url):
2403 """Attempt to apply a bundle from a URL."""
2400 """Attempt to apply a bundle from a URL."""
2404 with repo.lock(), repo.transaction('bundleurl') as tr:
2401 with repo.lock(), repo.transaction('bundleurl') as tr:
2405 try:
2402 try:
2406 fh = urlmod.open(ui, url)
2403 fh = urlmod.open(ui, url)
2407 cg = readbundle(ui, fh, 'stream')
2404 cg = readbundle(ui, fh, 'stream')
2408
2405
2409 if isinstance(cg, streamclone.streamcloneapplier):
2406 if isinstance(cg, streamclone.streamcloneapplier):
2410 cg.apply(repo)
2407 cg.apply(repo)
2411 else:
2408 else:
2412 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2409 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
2413 return True
2410 return True
2414 except urlerr.httperror as e:
2411 except urlerr.httperror as e:
2415 ui.warn(_('HTTP error fetching bundle: %s\n') %
2412 ui.warn(_('HTTP error fetching bundle: %s\n') %
2416 stringutil.forcebytestr(e))
2413 stringutil.forcebytestr(e))
2417 except urlerr.urlerror as e:
2414 except urlerr.urlerror as e:
2418 ui.warn(_('error fetching bundle: %s\n') %
2415 ui.warn(_('error fetching bundle: %s\n') %
2419 stringutil.forcebytestr(e.reason))
2416 stringutil.forcebytestr(e.reason))
2420
2417
2421 return False
2418 return False
@@ -1,1367 +1,1379 b''
1 #require killdaemons
1 #require killdaemons
2
2
3 $ cat >> $HGRCPATH << EOF
3 $ cat >> $HGRCPATH << EOF
4 > [extensions]
4 > [extensions]
5 > phasereport=$TESTDIR/testlib/ext-phase-report.py
5 > phasereport=$TESTDIR/testlib/ext-phase-report.py
6 > EOF
6 > EOF
7
7
8 $ hgph() { hg log -G --template "{rev} {phase} {desc} - {node|short}\n" $*; }
8 $ hgph() { hg log -G --template "{rev} {phase} {desc} - {node|short}\n" $*; }
9
9
10 $ mkcommit() {
10 $ mkcommit() {
11 > echo "$1" > "$1"
11 > echo "$1" > "$1"
12 > hg add "$1"
12 > hg add "$1"
13 > message="$1"
13 > message="$1"
14 > shift
14 > shift
15 > hg ci -m "$message" $*
15 > hg ci -m "$message" $*
16 > }
16 > }
17
17
18 $ hg init alpha
18 $ hg init alpha
19 $ cd alpha
19 $ cd alpha
20 $ mkcommit a-A
20 $ mkcommit a-A
21 test-debug-phase: new rev 0: x -> 1
21 test-debug-phase: new rev 0: x -> 1
22 $ mkcommit a-B
22 $ mkcommit a-B
23 test-debug-phase: new rev 1: x -> 1
23 test-debug-phase: new rev 1: x -> 1
24 $ mkcommit a-C
24 $ mkcommit a-C
25 test-debug-phase: new rev 2: x -> 1
25 test-debug-phase: new rev 2: x -> 1
26 $ mkcommit a-D
26 $ mkcommit a-D
27 test-debug-phase: new rev 3: x -> 1
27 test-debug-phase: new rev 3: x -> 1
28 $ hgph
28 $ hgph
29 @ 3 draft a-D - b555f63b6063
29 @ 3 draft a-D - b555f63b6063
30 |
30 |
31 o 2 draft a-C - 54acac6f23ab
31 o 2 draft a-C - 54acac6f23ab
32 |
32 |
33 o 1 draft a-B - 548a3d25dbf0
33 o 1 draft a-B - 548a3d25dbf0
34 |
34 |
35 o 0 draft a-A - 054250a37db4
35 o 0 draft a-A - 054250a37db4
36
36
37
37
38 $ hg init ../beta
38 $ hg init ../beta
39 $ hg push -r 1 ../beta
39 $ hg push -r 1 ../beta
40 pushing to ../beta
40 pushing to ../beta
41 searching for changes
41 searching for changes
42 adding changesets
42 adding changesets
43 adding manifests
43 adding manifests
44 adding file changes
44 adding file changes
45 added 2 changesets with 2 changes to 2 files
45 added 2 changesets with 2 changes to 2 files
46 test-debug-phase: new rev 0: x -> 0
46 test-debug-phase: new rev 0: x -> 0
47 test-debug-phase: new rev 1: x -> 0
47 test-debug-phase: new rev 1: x -> 0
48 test-debug-phase: move rev 0: 1 -> 0
48 test-debug-phase: move rev 0: 1 -> 0
49 test-debug-phase: move rev 1: 1 -> 0
49 test-debug-phase: move rev 1: 1 -> 0
50 $ hgph
50 $ hgph
51 @ 3 draft a-D - b555f63b6063
51 @ 3 draft a-D - b555f63b6063
52 |
52 |
53 o 2 draft a-C - 54acac6f23ab
53 o 2 draft a-C - 54acac6f23ab
54 |
54 |
55 o 1 public a-B - 548a3d25dbf0
55 o 1 public a-B - 548a3d25dbf0
56 |
56 |
57 o 0 public a-A - 054250a37db4
57 o 0 public a-A - 054250a37db4
58
58
59
59
60 $ cd ../beta
60 $ cd ../beta
61 $ hgph
61 $ hgph
62 o 1 public a-B - 548a3d25dbf0
62 o 1 public a-B - 548a3d25dbf0
63 |
63 |
64 o 0 public a-A - 054250a37db4
64 o 0 public a-A - 054250a37db4
65
65
66 $ hg up -q
66 $ hg up -q
67 $ mkcommit b-A
67 $ mkcommit b-A
68 test-debug-phase: new rev 2: x -> 1
68 test-debug-phase: new rev 2: x -> 1
69 $ hgph
69 $ hgph
70 @ 2 draft b-A - f54f1bb90ff3
70 @ 2 draft b-A - f54f1bb90ff3
71 |
71 |
72 o 1 public a-B - 548a3d25dbf0
72 o 1 public a-B - 548a3d25dbf0
73 |
73 |
74 o 0 public a-A - 054250a37db4
74 o 0 public a-A - 054250a37db4
75
75
76 $ hg pull ../alpha
76 $ hg pull ../alpha
77 pulling from ../alpha
77 pulling from ../alpha
78 searching for changes
78 searching for changes
79 adding changesets
79 adding changesets
80 adding manifests
80 adding manifests
81 adding file changes
81 adding file changes
82 added 2 changesets with 2 changes to 2 files (+1 heads)
82 added 2 changesets with 2 changes to 2 files (+1 heads)
83 new changesets 54acac6f23ab:b555f63b6063
83 new changesets 54acac6f23ab:b555f63b6063
84 test-debug-phase: new rev 3: x -> 0
84 test-debug-phase: new rev 3: x -> 0
85 test-debug-phase: new rev 4: x -> 0
85 test-debug-phase: new rev 4: x -> 0
86 (run 'hg heads' to see heads, 'hg merge' to merge)
86 (run 'hg heads' to see heads, 'hg merge' to merge)
87 $ hgph
87 $ hgph
88 o 4 public a-D - b555f63b6063
88 o 4 public a-D - b555f63b6063
89 |
89 |
90 o 3 public a-C - 54acac6f23ab
90 o 3 public a-C - 54acac6f23ab
91 |
91 |
92 | @ 2 draft b-A - f54f1bb90ff3
92 | @ 2 draft b-A - f54f1bb90ff3
93 |/
93 |/
94 o 1 public a-B - 548a3d25dbf0
94 o 1 public a-B - 548a3d25dbf0
95 |
95 |
96 o 0 public a-A - 054250a37db4
96 o 0 public a-A - 054250a37db4
97
97
98
98
99 pull did not update ../alpha's state.
99 pull did not update ../alpha's state.
100 push from alpha to beta should update phases even if nothing is transferred
100 push from alpha to beta should update phases even if nothing is transferred
101
101
102 $ cd ../alpha
102 $ cd ../alpha
103 $ hgph # not updated by remote pull
103 $ hgph # not updated by remote pull
104 @ 3 draft a-D - b555f63b6063
104 @ 3 draft a-D - b555f63b6063
105 |
105 |
106 o 2 draft a-C - 54acac6f23ab
106 o 2 draft a-C - 54acac6f23ab
107 |
107 |
108 o 1 public a-B - 548a3d25dbf0
108 o 1 public a-B - 548a3d25dbf0
109 |
109 |
110 o 0 public a-A - 054250a37db4
110 o 0 public a-A - 054250a37db4
111
111
112 $ hg push -r 2 ../beta
112 $ hg push -r 2 ../beta
113 pushing to ../beta
113 pushing to ../beta
114 searching for changes
114 searching for changes
115 no changes found
115 no changes found
116 test-debug-phase: move rev 2: 1 -> 0
116 test-debug-phase: move rev 2: 1 -> 0
117 [1]
117 [1]
118 $ hgph
118 $ hgph
119 @ 3 draft a-D - b555f63b6063
119 @ 3 draft a-D - b555f63b6063
120 |
120 |
121 o 2 public a-C - 54acac6f23ab
121 o 2 public a-C - 54acac6f23ab
122 |
122 |
123 o 1 public a-B - 548a3d25dbf0
123 o 1 public a-B - 548a3d25dbf0
124 |
124 |
125 o 0 public a-A - 054250a37db4
125 o 0 public a-A - 054250a37db4
126
126
127 $ hg push ../beta
127 $ hg push ../beta
128 pushing to ../beta
128 pushing to ../beta
129 searching for changes
129 searching for changes
130 no changes found
130 no changes found
131 test-debug-phase: move rev 3: 1 -> 0
131 test-debug-phase: move rev 3: 1 -> 0
132 [1]
132 [1]
133 $ hgph
133 $ hgph
134 @ 3 public a-D - b555f63b6063
134 @ 3 public a-D - b555f63b6063
135 |
135 |
136 o 2 public a-C - 54acac6f23ab
136 o 2 public a-C - 54acac6f23ab
137 |
137 |
138 o 1 public a-B - 548a3d25dbf0
138 o 1 public a-B - 548a3d25dbf0
139 |
139 |
140 o 0 public a-A - 054250a37db4
140 o 0 public a-A - 054250a37db4
141
141
142
142
143 update must update phase of common changeset too
143 update must update phase of common changeset too
144
144
145 $ hg pull ../beta # getting b-A
145 $ hg pull ../beta # getting b-A
146 pulling from ../beta
146 pulling from ../beta
147 searching for changes
147 searching for changes
148 adding changesets
148 adding changesets
149 adding manifests
149 adding manifests
150 adding file changes
150 adding file changes
151 added 1 changesets with 1 changes to 1 files (+1 heads)
151 added 1 changesets with 1 changes to 1 files (+1 heads)
152 new changesets f54f1bb90ff3
152 new changesets f54f1bb90ff3
153 test-debug-phase: new rev 4: x -> 0
153 test-debug-phase: new rev 4: x -> 0
154 (run 'hg heads' to see heads, 'hg merge' to merge)
154 (run 'hg heads' to see heads, 'hg merge' to merge)
155
155
156 $ cd ../beta
156 $ cd ../beta
157 $ hgph # not updated by remote pull
157 $ hgph # not updated by remote pull
158 o 4 public a-D - b555f63b6063
158 o 4 public a-D - b555f63b6063
159 |
159 |
160 o 3 public a-C - 54acac6f23ab
160 o 3 public a-C - 54acac6f23ab
161 |
161 |
162 | @ 2 draft b-A - f54f1bb90ff3
162 | @ 2 draft b-A - f54f1bb90ff3
163 |/
163 |/
164 o 1 public a-B - 548a3d25dbf0
164 o 1 public a-B - 548a3d25dbf0
165 |
165 |
166 o 0 public a-A - 054250a37db4
166 o 0 public a-A - 054250a37db4
167
167
168 $ hg pull ../alpha
168 $ hg pull ../alpha
169 pulling from ../alpha
169 pulling from ../alpha
170 searching for changes
170 searching for changes
171 no changes found
171 no changes found
172 test-debug-phase: move rev 2: 1 -> 0
172 test-debug-phase: move rev 2: 1 -> 0
173 $ hgph
173 $ hgph
174 o 4 public a-D - b555f63b6063
174 o 4 public a-D - b555f63b6063
175 |
175 |
176 o 3 public a-C - 54acac6f23ab
176 o 3 public a-C - 54acac6f23ab
177 |
177 |
178 | @ 2 public b-A - f54f1bb90ff3
178 | @ 2 public b-A - f54f1bb90ff3
179 |/
179 |/
180 o 1 public a-B - 548a3d25dbf0
180 o 1 public a-B - 548a3d25dbf0
181 |
181 |
182 o 0 public a-A - 054250a37db4
182 o 0 public a-A - 054250a37db4
183
183
184
184
185 Publish configuration option
185 Publish configuration option
186 ----------------------------
186 ----------------------------
187
187
188 Pull
188 Pull
189 ````
189 ````
190
190
191 changegroups are added without phase movement
191 changegroups are added without phase movement
192
192
193 $ hg bundle -a ../base.bundle
193 $ hg bundle -a ../base.bundle
194 5 changesets found
194 5 changesets found
195 $ cd ..
195 $ cd ..
196 $ hg init mu
196 $ hg init mu
197 $ cd mu
197 $ cd mu
198 $ cat > .hg/hgrc << EOF
198 $ cat > .hg/hgrc << EOF
199 > [phases]
199 > [phases]
200 > publish=0
200 > publish=0
201 > EOF
201 > EOF
202 $ hg unbundle ../base.bundle
202 $ hg unbundle ../base.bundle
203 adding changesets
203 adding changesets
204 adding manifests
204 adding manifests
205 adding file changes
205 adding file changes
206 added 5 changesets with 5 changes to 5 files (+1 heads)
206 added 5 changesets with 5 changes to 5 files (+1 heads)
207 new changesets 054250a37db4:b555f63b6063
207 new changesets 054250a37db4:b555f63b6063
208 test-debug-phase: new rev 0: x -> 1
208 test-debug-phase: new rev 0: x -> 1
209 test-debug-phase: new rev 1: x -> 1
209 test-debug-phase: new rev 1: x -> 1
210 test-debug-phase: new rev 2: x -> 1
210 test-debug-phase: new rev 2: x -> 1
211 test-debug-phase: new rev 3: x -> 1
211 test-debug-phase: new rev 3: x -> 1
212 test-debug-phase: new rev 4: x -> 1
212 test-debug-phase: new rev 4: x -> 1
213 (run 'hg heads' to see heads, 'hg merge' to merge)
213 (run 'hg heads' to see heads, 'hg merge' to merge)
214 $ hgph
214 $ hgph
215 o 4 draft a-D - b555f63b6063
215 o 4 draft a-D - b555f63b6063
216 |
216 |
217 o 3 draft a-C - 54acac6f23ab
217 o 3 draft a-C - 54acac6f23ab
218 |
218 |
219 | o 2 draft b-A - f54f1bb90ff3
219 | o 2 draft b-A - f54f1bb90ff3
220 |/
220 |/
221 o 1 draft a-B - 548a3d25dbf0
221 o 1 draft a-B - 548a3d25dbf0
222 |
222 |
223 o 0 draft a-A - 054250a37db4
223 o 0 draft a-A - 054250a37db4
224
224
225 $ cd ..
225 $ cd ..
226
226
227 Pulling from publish=False to publish=False does not move the phase boundary.
227 Pulling from publish=False to publish=False does not move the phase boundary.
228
228
229 $ hg init nu
229 $ hg init nu
230 $ cd nu
230 $ cd nu
231 $ cat > .hg/hgrc << EOF
231 $ cat > .hg/hgrc << EOF
232 > [phases]
232 > [phases]
233 > publish=0
233 > publish=0
234 > EOF
234 > EOF
235 $ hg pull ../mu -r 54acac6f23ab
235 $ hg pull ../mu -r 54acac6f23ab
236 pulling from ../mu
236 pulling from ../mu
237 adding changesets
237 adding changesets
238 adding manifests
238 adding manifests
239 adding file changes
239 adding file changes
240 added 3 changesets with 3 changes to 3 files
240 added 3 changesets with 3 changes to 3 files
241 new changesets 054250a37db4:54acac6f23ab
241 new changesets 054250a37db4:54acac6f23ab
242 test-debug-phase: new rev 0: x -> 1
242 test-debug-phase: new rev 0: x -> 1
243 test-debug-phase: new rev 1: x -> 1
243 test-debug-phase: new rev 1: x -> 1
244 test-debug-phase: new rev 2: x -> 1
244 test-debug-phase: new rev 2: x -> 1
245 (run 'hg update' to get a working copy)
245 (run 'hg update' to get a working copy)
246 $ hgph
246 $ hgph
247 o 2 draft a-C - 54acac6f23ab
247 o 2 draft a-C - 54acac6f23ab
248 |
248 |
249 o 1 draft a-B - 548a3d25dbf0
249 o 1 draft a-B - 548a3d25dbf0
250 |
250 |
251 o 0 draft a-A - 054250a37db4
251 o 0 draft a-A - 054250a37db4
252
252
253
253
254 Even for common changesets
254 Even for common changesets
255
255
256 $ hg pull ../mu -r f54f1bb90ff3
256 $ hg pull ../mu -r f54f1bb90ff3
257 pulling from ../mu
257 pulling from ../mu
258 searching for changes
258 searching for changes
259 adding changesets
259 adding changesets
260 adding manifests
260 adding manifests
261 adding file changes
261 adding file changes
262 added 1 changesets with 1 changes to 1 files (+1 heads)
262 added 1 changesets with 1 changes to 1 files (+1 heads)
263 new changesets f54f1bb90ff3
263 new changesets f54f1bb90ff3
264 test-debug-phase: new rev 3: x -> 1
264 test-debug-phase: new rev 3: x -> 1
265 (run 'hg heads' to see heads, 'hg merge' to merge)
265 (run 'hg heads' to see heads, 'hg merge' to merge)
266 $ hgph
266 $ hgph
267 o 3 draft b-A - f54f1bb90ff3
267 o 3 draft b-A - f54f1bb90ff3
268 |
268 |
269 | o 2 draft a-C - 54acac6f23ab
269 | o 2 draft a-C - 54acac6f23ab
270 |/
270 |/
271 o 1 draft a-B - 548a3d25dbf0
271 o 1 draft a-B - 548a3d25dbf0
272 |
272 |
273 o 0 draft a-A - 054250a37db4
273 o 0 draft a-A - 054250a37db4
274
274
275
275
276
276
277 Pulling from Publish=True to Publish=False moves the boundary in the common set.
277 Pulling from Publish=True to Publish=False moves the boundary in the common set.
278 (we are in nu)
278 (we are in nu)
279
279
280 $ hg pull ../alpha -r b555f63b6063
280 $ hg pull ../alpha -r b555f63b6063
281 pulling from ../alpha
281 pulling from ../alpha
282 searching for changes
282 searching for changes
283 adding changesets
283 adding changesets
284 adding manifests
284 adding manifests
285 adding file changes
285 adding file changes
286 added 1 changesets with 1 changes to 1 files
286 added 1 changesets with 1 changes to 1 files
287 new changesets b555f63b6063
287 new changesets b555f63b6063
288 test-debug-phase: move rev 0: 1 -> 0
288 test-debug-phase: move rev 0: 1 -> 0
289 test-debug-phase: move rev 1: 1 -> 0
289 test-debug-phase: move rev 1: 1 -> 0
290 test-debug-phase: move rev 2: 1 -> 0
290 test-debug-phase: move rev 2: 1 -> 0
291 test-debug-phase: new rev 4: x -> 0
291 test-debug-phase: new rev 4: x -> 0
292 (run 'hg update' to get a working copy)
292 (run 'hg update' to get a working copy)
293 $ hgph # f54f1bb90ff3 stay draft, not ancestor of -r
293 $ hgph # f54f1bb90ff3 stay draft, not ancestor of -r
294 o 4 public a-D - b555f63b6063
294 o 4 public a-D - b555f63b6063
295 |
295 |
296 | o 3 draft b-A - f54f1bb90ff3
296 | o 3 draft b-A - f54f1bb90ff3
297 | |
297 | |
298 o | 2 public a-C - 54acac6f23ab
298 o | 2 public a-C - 54acac6f23ab
299 |/
299 |/
300 o 1 public a-B - 548a3d25dbf0
300 o 1 public a-B - 548a3d25dbf0
301 |
301 |
302 o 0 public a-A - 054250a37db4
302 o 0 public a-A - 054250a37db4
303
303
304
304
305 pulling from publish=False to publish=False with some public changesets
305 pulling from publish=False to publish=False with some public changesets
306
306
307 $ hg up -q f54f1bb90ff3
307 $ hg up -q f54f1bb90ff3
308 $ mkcommit n-A
308 $ mkcommit n-A
309 test-debug-phase: new rev 5: x -> 1
309 test-debug-phase: new rev 5: x -> 1
310 $ mkcommit n-B
310 $ mkcommit n-B
311 test-debug-phase: new rev 6: x -> 1
311 test-debug-phase: new rev 6: x -> 1
312 $ hgph
312 $ hgph
313 @ 6 draft n-B - 145e75495359
313 @ 6 draft n-B - 145e75495359
314 |
314 |
315 o 5 draft n-A - d6bcb4f74035
315 o 5 draft n-A - d6bcb4f74035
316 |
316 |
317 | o 4 public a-D - b555f63b6063
317 | o 4 public a-D - b555f63b6063
318 | |
318 | |
319 o | 3 draft b-A - f54f1bb90ff3
319 o | 3 draft b-A - f54f1bb90ff3
320 | |
320 | |
321 | o 2 public a-C - 54acac6f23ab
321 | o 2 public a-C - 54acac6f23ab
322 |/
322 |/
323 o 1 public a-B - 548a3d25dbf0
323 o 1 public a-B - 548a3d25dbf0
324 |
324 |
325 o 0 public a-A - 054250a37db4
325 o 0 public a-A - 054250a37db4
326
326
327 $ cd ../mu
327 $ cd ../mu
328 $ hg pull ../nu
328 $ hg pull ../nu
329 pulling from ../nu
329 pulling from ../nu
330 searching for changes
330 searching for changes
331 adding changesets
331 adding changesets
332 adding manifests
332 adding manifests
333 adding file changes
333 adding file changes
334 added 2 changesets with 2 changes to 2 files
334 added 2 changesets with 2 changes to 2 files
335 new changesets d6bcb4f74035:145e75495359
335 new changesets d6bcb4f74035:145e75495359
336 test-debug-phase: move rev 0: 1 -> 0
336 test-debug-phase: move rev 0: 1 -> 0
337 test-debug-phase: move rev 1: 1 -> 0
337 test-debug-phase: move rev 1: 1 -> 0
338 test-debug-phase: move rev 3: 1 -> 0
338 test-debug-phase: move rev 3: 1 -> 0
339 test-debug-phase: move rev 4: 1 -> 0
339 test-debug-phase: move rev 4: 1 -> 0
340 test-debug-phase: new rev 5: x -> 1
340 test-debug-phase: new rev 5: x -> 1
341 test-debug-phase: new rev 6: x -> 1
341 test-debug-phase: new rev 6: x -> 1
342 (run 'hg update' to get a working copy)
342 (run 'hg update' to get a working copy)
343 $ hgph
343 $ hgph
344 o 6 draft n-B - 145e75495359
344 o 6 draft n-B - 145e75495359
345 |
345 |
346 o 5 draft n-A - d6bcb4f74035
346 o 5 draft n-A - d6bcb4f74035
347 |
347 |
348 | o 4 public a-D - b555f63b6063
348 | o 4 public a-D - b555f63b6063
349 | |
349 | |
350 | o 3 public a-C - 54acac6f23ab
350 | o 3 public a-C - 54acac6f23ab
351 | |
351 | |
352 o | 2 draft b-A - f54f1bb90ff3
352 o | 2 draft b-A - f54f1bb90ff3
353 |/
353 |/
354 o 1 public a-B - 548a3d25dbf0
354 o 1 public a-B - 548a3d25dbf0
355 |
355 |
356 o 0 public a-A - 054250a37db4
356 o 0 public a-A - 054250a37db4
357
357
358 $ cd ..
358 $ cd ..
359
359
360 pulling into publish=True
360 pulling into publish=True
361
361
362 $ cd alpha
362 $ cd alpha
363 $ hgph
363 $ hgph
364 o 4 public b-A - f54f1bb90ff3
364 o 4 public b-A - f54f1bb90ff3
365 |
365 |
366 | @ 3 public a-D - b555f63b6063
366 | @ 3 public a-D - b555f63b6063
367 | |
367 | |
368 | o 2 public a-C - 54acac6f23ab
368 | o 2 public a-C - 54acac6f23ab
369 |/
369 |/
370 o 1 public a-B - 548a3d25dbf0
370 o 1 public a-B - 548a3d25dbf0
371 |
371 |
372 o 0 public a-A - 054250a37db4
372 o 0 public a-A - 054250a37db4
373
373
374 $ hg pull ../mu
374 $ hg pull ../mu
375 pulling from ../mu
375 pulling from ../mu
376 searching for changes
376 searching for changes
377 adding changesets
377 adding changesets
378 adding manifests
378 adding manifests
379 adding file changes
379 adding file changes
380 added 2 changesets with 2 changes to 2 files
380 added 2 changesets with 2 changes to 2 files
381 new changesets d6bcb4f74035:145e75495359
381 new changesets d6bcb4f74035:145e75495359
382 test-debug-phase: new rev 5: x -> 1
382 test-debug-phase: new rev 5: x -> 1
383 test-debug-phase: new rev 6: x -> 1
383 test-debug-phase: new rev 6: x -> 1
384 (run 'hg update' to get a working copy)
384 (run 'hg update' to get a working copy)
385 $ hgph
385 $ hgph
386 o 6 draft n-B - 145e75495359
386 o 6 draft n-B - 145e75495359
387 |
387 |
388 o 5 draft n-A - d6bcb4f74035
388 o 5 draft n-A - d6bcb4f74035
389 |
389 |
390 o 4 public b-A - f54f1bb90ff3
390 o 4 public b-A - f54f1bb90ff3
391 |
391 |
392 | @ 3 public a-D - b555f63b6063
392 | @ 3 public a-D - b555f63b6063
393 | |
393 | |
394 | o 2 public a-C - 54acac6f23ab
394 | o 2 public a-C - 54acac6f23ab
395 |/
395 |/
396 o 1 public a-B - 548a3d25dbf0
396 o 1 public a-B - 548a3d25dbf0
397 |
397 |
398 o 0 public a-A - 054250a37db4
398 o 0 public a-A - 054250a37db4
399
399
400 $ cd ..
400 $ cd ..
401
401
402 pulling back into original repo
402 pulling back into original repo
403
403
404 $ cd nu
404 $ cd nu
405 $ hg pull ../alpha
405 $ hg pull ../alpha
406 pulling from ../alpha
406 pulling from ../alpha
407 searching for changes
407 searching for changes
408 no changes found
408 no changes found
409 test-debug-phase: move rev 3: 1 -> 0
409 test-debug-phase: move rev 3: 1 -> 0
410 test-debug-phase: move rev 5: 1 -> 0
410 test-debug-phase: move rev 5: 1 -> 0
411 test-debug-phase: move rev 6: 1 -> 0
411 test-debug-phase: move rev 6: 1 -> 0
412 $ hgph
412 $ hgph
413 @ 6 public n-B - 145e75495359
413 @ 6 public n-B - 145e75495359
414 |
414 |
415 o 5 public n-A - d6bcb4f74035
415 o 5 public n-A - d6bcb4f74035
416 |
416 |
417 | o 4 public a-D - b555f63b6063
417 | o 4 public a-D - b555f63b6063
418 | |
418 | |
419 o | 3 public b-A - f54f1bb90ff3
419 o | 3 public b-A - f54f1bb90ff3
420 | |
420 | |
421 | o 2 public a-C - 54acac6f23ab
421 | o 2 public a-C - 54acac6f23ab
422 |/
422 |/
423 o 1 public a-B - 548a3d25dbf0
423 o 1 public a-B - 548a3d25dbf0
424 |
424 |
425 o 0 public a-A - 054250a37db4
425 o 0 public a-A - 054250a37db4
426
426
427
427
428 Push
428 Push
429 ````
429 ````
430
430
431 (inserted)
431 (inserted)
432
432
433 Test that phases are pushed even when there is nothing to push
433 Test that phases are pushed even when there is nothing to push
434 (this might be tested later but it is very convenient here to avoid altering the tests too much)
434 (this might be tested later but it is very convenient here to avoid altering the tests too much)
435
435
436 Push back to alpha
436 Push back to alpha
437
437
438 $ hg push ../alpha # from nu
438 $ hg push ../alpha # from nu
439 pushing to ../alpha
439 pushing to ../alpha
440 searching for changes
440 searching for changes
441 no changes found
441 no changes found
442 test-debug-phase: move rev 5: 1 -> 0
442 test-debug-phase: move rev 5: 1 -> 0
443 test-debug-phase: move rev 6: 1 -> 0
443 test-debug-phase: move rev 6: 1 -> 0
444 [1]
444 [1]
445 $ cd ..
445 $ cd ..
446 $ cd alpha
446 $ cd alpha
447 $ hgph
447 $ hgph
448 o 6 public n-B - 145e75495359
448 o 6 public n-B - 145e75495359
449 |
449 |
450 o 5 public n-A - d6bcb4f74035
450 o 5 public n-A - d6bcb4f74035
451 |
451 |
452 o 4 public b-A - f54f1bb90ff3
452 o 4 public b-A - f54f1bb90ff3
453 |
453 |
454 | @ 3 public a-D - b555f63b6063
454 | @ 3 public a-D - b555f63b6063
455 | |
455 | |
456 | o 2 public a-C - 54acac6f23ab
456 | o 2 public a-C - 54acac6f23ab
457 |/
457 |/
458 o 1 public a-B - 548a3d25dbf0
458 o 1 public a-B - 548a3d25dbf0
459 |
459 |
460 o 0 public a-A - 054250a37db4
460 o 0 public a-A - 054250a37db4
461
461
462
462
463 (end insertion)
463 (end insertion)
464
464
465
465
466 initial setup
466 initial setup
467
467
468 $ hg log -G # of alpha
468 $ hg log -G # of alpha
469 o changeset: 6:145e75495359
469 o changeset: 6:145e75495359
470 | tag: tip
470 | tag: tip
471 | user: test
471 | user: test
472 | date: Thu Jan 01 00:00:00 1970 +0000
472 | date: Thu Jan 01 00:00:00 1970 +0000
473 | summary: n-B
473 | summary: n-B
474 |
474 |
475 o changeset: 5:d6bcb4f74035
475 o changeset: 5:d6bcb4f74035
476 | user: test
476 | user: test
477 | date: Thu Jan 01 00:00:00 1970 +0000
477 | date: Thu Jan 01 00:00:00 1970 +0000
478 | summary: n-A
478 | summary: n-A
479 |
479 |
480 o changeset: 4:f54f1bb90ff3
480 o changeset: 4:f54f1bb90ff3
481 | parent: 1:548a3d25dbf0
481 | parent: 1:548a3d25dbf0
482 | user: test
482 | user: test
483 | date: Thu Jan 01 00:00:00 1970 +0000
483 | date: Thu Jan 01 00:00:00 1970 +0000
484 | summary: b-A
484 | summary: b-A
485 |
485 |
486 | @ changeset: 3:b555f63b6063
486 | @ changeset: 3:b555f63b6063
487 | | user: test
487 | | user: test
488 | | date: Thu Jan 01 00:00:00 1970 +0000
488 | | date: Thu Jan 01 00:00:00 1970 +0000
489 | | summary: a-D
489 | | summary: a-D
490 | |
490 | |
491 | o changeset: 2:54acac6f23ab
491 | o changeset: 2:54acac6f23ab
492 |/ user: test
492 |/ user: test
493 | date: Thu Jan 01 00:00:00 1970 +0000
493 | date: Thu Jan 01 00:00:00 1970 +0000
494 | summary: a-C
494 | summary: a-C
495 |
495 |
496 o changeset: 1:548a3d25dbf0
496 o changeset: 1:548a3d25dbf0
497 | user: test
497 | user: test
498 | date: Thu Jan 01 00:00:00 1970 +0000
498 | date: Thu Jan 01 00:00:00 1970 +0000
499 | summary: a-B
499 | summary: a-B
500 |
500 |
501 o changeset: 0:054250a37db4
501 o changeset: 0:054250a37db4
502 user: test
502 user: test
503 date: Thu Jan 01 00:00:00 1970 +0000
503 date: Thu Jan 01 00:00:00 1970 +0000
504 summary: a-A
504 summary: a-A
505
505
506 $ mkcommit a-E
506 $ mkcommit a-E
507 test-debug-phase: new rev 7: x -> 1
507 test-debug-phase: new rev 7: x -> 1
508 $ mkcommit a-F
508 $ mkcommit a-F
509 test-debug-phase: new rev 8: x -> 1
509 test-debug-phase: new rev 8: x -> 1
510 $ mkcommit a-G
510 $ mkcommit a-G
511 test-debug-phase: new rev 9: x -> 1
511 test-debug-phase: new rev 9: x -> 1
512 $ hg up d6bcb4f74035 -q
512 $ hg up d6bcb4f74035 -q
513 $ mkcommit a-H
513 $ mkcommit a-H
514 test-debug-phase: new rev 10: x -> 1
514 test-debug-phase: new rev 10: x -> 1
515 created new head
515 created new head
516 $ hgph
516 $ hgph
517 @ 10 draft a-H - 967b449fbc94
517 @ 10 draft a-H - 967b449fbc94
518 |
518 |
519 | o 9 draft a-G - 3e27b6f1eee1
519 | o 9 draft a-G - 3e27b6f1eee1
520 | |
520 | |
521 | o 8 draft a-F - b740e3e5c05d
521 | o 8 draft a-F - b740e3e5c05d
522 | |
522 | |
523 | o 7 draft a-E - e9f537e46dea
523 | o 7 draft a-E - e9f537e46dea
524 | |
524 | |
525 +---o 6 public n-B - 145e75495359
525 +---o 6 public n-B - 145e75495359
526 | |
526 | |
527 o | 5 public n-A - d6bcb4f74035
527 o | 5 public n-A - d6bcb4f74035
528 | |
528 | |
529 o | 4 public b-A - f54f1bb90ff3
529 o | 4 public b-A - f54f1bb90ff3
530 | |
530 | |
531 | o 3 public a-D - b555f63b6063
531 | o 3 public a-D - b555f63b6063
532 | |
532 | |
533 | o 2 public a-C - 54acac6f23ab
533 | o 2 public a-C - 54acac6f23ab
534 |/
534 |/
535 o 1 public a-B - 548a3d25dbf0
535 o 1 public a-B - 548a3d25dbf0
536 |
536 |
537 o 0 public a-A - 054250a37db4
537 o 0 public a-A - 054250a37db4
538
538
539
539
540 Pulling from a bundle does not alter phases of changesets not present in the bundle
540 Pulling from a bundle does not alter phases of changesets not present in the bundle
541
541
542 #if repobundlerepo
542 #if repobundlerepo
543 $ hg bundle --base 1 -r 6 -r 3 ../partial-bundle.hg
543 $ hg bundle --base 1 -r 6 -r 3 ../partial-bundle.hg
544 5 changesets found
544 5 changesets found
545 $ hg pull ../partial-bundle.hg
545 $ hg pull ../partial-bundle.hg
546 pulling from ../partial-bundle.hg
546 pulling from ../partial-bundle.hg
547 searching for changes
547 searching for changes
548 no changes found
548 no changes found
549 $ hgph
549 $ hgph
550 @ 10 draft a-H - 967b449fbc94
550 @ 10 draft a-H - 967b449fbc94
551 |
551 |
552 | o 9 draft a-G - 3e27b6f1eee1
552 | o 9 draft a-G - 3e27b6f1eee1
553 | |
553 | |
554 | o 8 draft a-F - b740e3e5c05d
554 | o 8 draft a-F - b740e3e5c05d
555 | |
555 | |
556 | o 7 draft a-E - e9f537e46dea
556 | o 7 draft a-E - e9f537e46dea
557 | |
557 | |
558 +---o 6 public n-B - 145e75495359
558 +---o 6 public n-B - 145e75495359
559 | |
559 | |
560 o | 5 public n-A - d6bcb4f74035
560 o | 5 public n-A - d6bcb4f74035
561 | |
561 | |
562 o | 4 public b-A - f54f1bb90ff3
562 o | 4 public b-A - f54f1bb90ff3
563 | |
563 | |
564 | o 3 public a-D - b555f63b6063
564 | o 3 public a-D - b555f63b6063
565 | |
565 | |
566 | o 2 public a-C - 54acac6f23ab
566 | o 2 public a-C - 54acac6f23ab
567 |/
567 |/
568 o 1 public a-B - 548a3d25dbf0
568 o 1 public a-B - 548a3d25dbf0
569 |
569 |
570 o 0 public a-A - 054250a37db4
570 o 0 public a-A - 054250a37db4
571
571
572 #endif
572 #endif
573
573
574 Pushing to Publish=False (unknown changeset)
574 Pushing to Publish=False (unknown changeset)
575
575
576 $ hg push ../mu -r b740e3e5c05d # a-F
576 $ hg push ../mu -r b740e3e5c05d # a-F
577 pushing to ../mu
577 pushing to ../mu
578 searching for changes
578 searching for changes
579 adding changesets
579 adding changesets
580 adding manifests
580 adding manifests
581 adding file changes
581 adding file changes
582 added 2 changesets with 2 changes to 2 files
582 added 2 changesets with 2 changes to 2 files
583 test-debug-phase: new rev 7: x -> 1
583 test-debug-phase: new rev 7: x -> 1
584 test-debug-phase: new rev 8: x -> 1
584 test-debug-phase: new rev 8: x -> 1
585 $ hgph
585 $ hgph
586 @ 10 draft a-H - 967b449fbc94
586 @ 10 draft a-H - 967b449fbc94
587 |
587 |
588 | o 9 draft a-G - 3e27b6f1eee1
588 | o 9 draft a-G - 3e27b6f1eee1
589 | |
589 | |
590 | o 8 draft a-F - b740e3e5c05d
590 | o 8 draft a-F - b740e3e5c05d
591 | |
591 | |
592 | o 7 draft a-E - e9f537e46dea
592 | o 7 draft a-E - e9f537e46dea
593 | |
593 | |
594 +---o 6 public n-B - 145e75495359
594 +---o 6 public n-B - 145e75495359
595 | |
595 | |
596 o | 5 public n-A - d6bcb4f74035
596 o | 5 public n-A - d6bcb4f74035
597 | |
597 | |
598 o | 4 public b-A - f54f1bb90ff3
598 o | 4 public b-A - f54f1bb90ff3
599 | |
599 | |
600 | o 3 public a-D - b555f63b6063
600 | o 3 public a-D - b555f63b6063
601 | |
601 | |
602 | o 2 public a-C - 54acac6f23ab
602 | o 2 public a-C - 54acac6f23ab
603 |/
603 |/
604 o 1 public a-B - 548a3d25dbf0
604 o 1 public a-B - 548a3d25dbf0
605 |
605 |
606 o 0 public a-A - 054250a37db4
606 o 0 public a-A - 054250a37db4
607
607
608
608
609 $ cd ../mu
609 $ cd ../mu
610 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
610 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
611 > # not ancestor of -r
611 > # not ancestor of -r
612 o 8 draft a-F - b740e3e5c05d
612 o 8 draft a-F - b740e3e5c05d
613 |
613 |
614 o 7 draft a-E - e9f537e46dea
614 o 7 draft a-E - e9f537e46dea
615 |
615 |
616 | o 6 draft n-B - 145e75495359
616 | o 6 draft n-B - 145e75495359
617 | |
617 | |
618 | o 5 draft n-A - d6bcb4f74035
618 | o 5 draft n-A - d6bcb4f74035
619 | |
619 | |
620 o | 4 public a-D - b555f63b6063
620 o | 4 public a-D - b555f63b6063
621 | |
621 | |
622 o | 3 public a-C - 54acac6f23ab
622 o | 3 public a-C - 54acac6f23ab
623 | |
623 | |
624 | o 2 draft b-A - f54f1bb90ff3
624 | o 2 draft b-A - f54f1bb90ff3
625 |/
625 |/
626 o 1 public a-B - 548a3d25dbf0
626 o 1 public a-B - 548a3d25dbf0
627 |
627 |
628 o 0 public a-A - 054250a37db4
628 o 0 public a-A - 054250a37db4
629
629
630
630
631 Pushing to Publish=True (unknown changeset)
631 Pushing to Publish=True (unknown changeset)
632
632
633 $ hg push ../beta -r b740e3e5c05d
633 $ hg push ../beta -r b740e3e5c05d
634 pushing to ../beta
634 pushing to ../beta
635 searching for changes
635 searching for changes
636 adding changesets
636 adding changesets
637 adding manifests
637 adding manifests
638 adding file changes
638 adding file changes
639 added 2 changesets with 2 changes to 2 files
639 added 2 changesets with 2 changes to 2 files
640 test-debug-phase: new rev 5: x -> 0
640 test-debug-phase: new rev 5: x -> 0
641 test-debug-phase: new rev 6: x -> 0
641 test-debug-phase: new rev 6: x -> 0
642 test-debug-phase: move rev 7: 1 -> 0
642 test-debug-phase: move rev 7: 1 -> 0
643 test-debug-phase: move rev 8: 1 -> 0
643 test-debug-phase: move rev 8: 1 -> 0
644 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
644 $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft,
645 > # not ancestor of -r
645 > # not ancestor of -r
646 o 8 public a-F - b740e3e5c05d
646 o 8 public a-F - b740e3e5c05d
647 |
647 |
648 o 7 public a-E - e9f537e46dea
648 o 7 public a-E - e9f537e46dea
649 |
649 |
650 | o 6 draft n-B - 145e75495359
650 | o 6 draft n-B - 145e75495359
651 | |
651 | |
652 | o 5 draft n-A - d6bcb4f74035
652 | o 5 draft n-A - d6bcb4f74035
653 | |
653 | |
654 o | 4 public a-D - b555f63b6063
654 o | 4 public a-D - b555f63b6063
655 | |
655 | |
656 o | 3 public a-C - 54acac6f23ab
656 o | 3 public a-C - 54acac6f23ab
657 | |
657 | |
658 | o 2 draft b-A - f54f1bb90ff3
658 | o 2 draft b-A - f54f1bb90ff3
659 |/
659 |/
660 o 1 public a-B - 548a3d25dbf0
660 o 1 public a-B - 548a3d25dbf0
661 |
661 |
662 o 0 public a-A - 054250a37db4
662 o 0 public a-A - 054250a37db4
663
663
664
664
665 Pushing to Publish=True (common changeset)
665 Pushing to Publish=True (common changeset)
666
666
667 $ cd ../beta
667 $ cd ../beta
668 $ hg push ../alpha
668 $ hg push ../alpha
669 pushing to ../alpha
669 pushing to ../alpha
670 searching for changes
670 searching for changes
671 no changes found
671 no changes found
672 test-debug-phase: move rev 7: 1 -> 0
672 test-debug-phase: move rev 7: 1 -> 0
673 test-debug-phase: move rev 8: 1 -> 0
673 test-debug-phase: move rev 8: 1 -> 0
674 [1]
674 [1]
675 $ hgph
675 $ hgph
676 o 6 public a-F - b740e3e5c05d
676 o 6 public a-F - b740e3e5c05d
677 |
677 |
678 o 5 public a-E - e9f537e46dea
678 o 5 public a-E - e9f537e46dea
679 |
679 |
680 o 4 public a-D - b555f63b6063
680 o 4 public a-D - b555f63b6063
681 |
681 |
682 o 3 public a-C - 54acac6f23ab
682 o 3 public a-C - 54acac6f23ab
683 |
683 |
684 | @ 2 public b-A - f54f1bb90ff3
684 | @ 2 public b-A - f54f1bb90ff3
685 |/
685 |/
686 o 1 public a-B - 548a3d25dbf0
686 o 1 public a-B - 548a3d25dbf0
687 |
687 |
688 o 0 public a-A - 054250a37db4
688 o 0 public a-A - 054250a37db4
689
689
690 $ cd ../alpha
690 $ cd ../alpha
691 $ hgph
691 $ hgph
692 @ 10 draft a-H - 967b449fbc94
692 @ 10 draft a-H - 967b449fbc94
693 |
693 |
694 | o 9 draft a-G - 3e27b6f1eee1
694 | o 9 draft a-G - 3e27b6f1eee1
695 | |
695 | |
696 | o 8 public a-F - b740e3e5c05d
696 | o 8 public a-F - b740e3e5c05d
697 | |
697 | |
698 | o 7 public a-E - e9f537e46dea
698 | o 7 public a-E - e9f537e46dea
699 | |
699 | |
700 +---o 6 public n-B - 145e75495359
700 +---o 6 public n-B - 145e75495359
701 | |
701 | |
702 o | 5 public n-A - d6bcb4f74035
702 o | 5 public n-A - d6bcb4f74035
703 | |
703 | |
704 o | 4 public b-A - f54f1bb90ff3
704 o | 4 public b-A - f54f1bb90ff3
705 | |
705 | |
706 | o 3 public a-D - b555f63b6063
706 | o 3 public a-D - b555f63b6063
707 | |
707 | |
708 | o 2 public a-C - 54acac6f23ab
708 | o 2 public a-C - 54acac6f23ab
709 |/
709 |/
710 o 1 public a-B - 548a3d25dbf0
710 o 1 public a-B - 548a3d25dbf0
711 |
711 |
712 o 0 public a-A - 054250a37db4
712 o 0 public a-A - 054250a37db4
713
713
714
714
715 Pushing to Publish=False (common changeset that changes phase + unknown one)
715 Pushing to Publish=False (common changeset that changes phase + unknown one)
716
716
717 $ hg push ../mu -r 967b449fbc94 -f
717 $ hg push ../mu -r 967b449fbc94 -f
718 pushing to ../mu
718 pushing to ../mu
719 searching for changes
719 searching for changes
720 adding changesets
720 adding changesets
721 adding manifests
721 adding manifests
722 adding file changes
722 adding file changes
723 added 1 changesets with 1 changes to 1 files (+1 heads)
723 added 1 changesets with 1 changes to 1 files (+1 heads)
724 test-debug-phase: move rev 2: 1 -> 0
724 test-debug-phase: move rev 2: 1 -> 0
725 test-debug-phase: move rev 5: 1 -> 0
725 test-debug-phase: move rev 5: 1 -> 0
726 test-debug-phase: new rev 9: x -> 1
726 test-debug-phase: new rev 9: x -> 1
727 $ hgph
727 $ hgph
728 @ 10 draft a-H - 967b449fbc94
728 @ 10 draft a-H - 967b449fbc94
729 |
729 |
730 | o 9 draft a-G - 3e27b6f1eee1
730 | o 9 draft a-G - 3e27b6f1eee1
731 | |
731 | |
732 | o 8 public a-F - b740e3e5c05d
732 | o 8 public a-F - b740e3e5c05d
733 | |
733 | |
734 | o 7 public a-E - e9f537e46dea
734 | o 7 public a-E - e9f537e46dea
735 | |
735 | |
736 +---o 6 public n-B - 145e75495359
736 +---o 6 public n-B - 145e75495359
737 | |
737 | |
738 o | 5 public n-A - d6bcb4f74035
738 o | 5 public n-A - d6bcb4f74035
739 | |
739 | |
740 o | 4 public b-A - f54f1bb90ff3
740 o | 4 public b-A - f54f1bb90ff3
741 | |
741 | |
742 | o 3 public a-D - b555f63b6063
742 | o 3 public a-D - b555f63b6063
743 | |
743 | |
744 | o 2 public a-C - 54acac6f23ab
744 | o 2 public a-C - 54acac6f23ab
745 |/
745 |/
746 o 1 public a-B - 548a3d25dbf0
746 o 1 public a-B - 548a3d25dbf0
747 |
747 |
748 o 0 public a-A - 054250a37db4
748 o 0 public a-A - 054250a37db4
749
749
750 $ cd ../mu
750 $ cd ../mu
751 $ hgph # d6bcb4f74035 should have changed phase
751 $ hgph # d6bcb4f74035 should have changed phase
752 > # 145e75495359 is still draft. not ancestor of -r
752 > # 145e75495359 is still draft. not ancestor of -r
753 o 9 draft a-H - 967b449fbc94
753 o 9 draft a-H - 967b449fbc94
754 |
754 |
755 | o 8 public a-F - b740e3e5c05d
755 | o 8 public a-F - b740e3e5c05d
756 | |
756 | |
757 | o 7 public a-E - e9f537e46dea
757 | o 7 public a-E - e9f537e46dea
758 | |
758 | |
759 +---o 6 draft n-B - 145e75495359
759 +---o 6 draft n-B - 145e75495359
760 | |
760 | |
761 o | 5 public n-A - d6bcb4f74035
761 o | 5 public n-A - d6bcb4f74035
762 | |
762 | |
763 | o 4 public a-D - b555f63b6063
763 | o 4 public a-D - b555f63b6063
764 | |
764 | |
765 | o 3 public a-C - 54acac6f23ab
765 | o 3 public a-C - 54acac6f23ab
766 | |
766 | |
767 o | 2 public b-A - f54f1bb90ff3
767 o | 2 public b-A - f54f1bb90ff3
768 |/
768 |/
769 o 1 public a-B - 548a3d25dbf0
769 o 1 public a-B - 548a3d25dbf0
770 |
770 |
771 o 0 public a-A - 054250a37db4
771 o 0 public a-A - 054250a37db4
772
772
773
773
774
774
775 Pushing to Publish=True (common changeset from publish=False)
775 Pushing to Publish=True (common changeset from publish=False)
776
776
777 (in mu)
777 (in mu)
778 $ hg push ../alpha
778 $ hg push ../alpha
779 pushing to ../alpha
779 pushing to ../alpha
780 searching for changes
780 searching for changes
781 no changes found
781 no changes found
782 test-debug-phase: move rev 10: 1 -> 0
782 test-debug-phase: move rev 10: 1 -> 0
783 test-debug-phase: move rev 6: 1 -> 0
783 test-debug-phase: move rev 6: 1 -> 0
784 test-debug-phase: move rev 9: 1 -> 0
784 test-debug-phase: move rev 9: 1 -> 0
785 [1]
785 [1]
786 $ hgph
786 $ hgph
787 o 9 public a-H - 967b449fbc94
787 o 9 public a-H - 967b449fbc94
788 |
788 |
789 | o 8 public a-F - b740e3e5c05d
789 | o 8 public a-F - b740e3e5c05d
790 | |
790 | |
791 | o 7 public a-E - e9f537e46dea
791 | o 7 public a-E - e9f537e46dea
792 | |
792 | |
793 +---o 6 public n-B - 145e75495359
793 +---o 6 public n-B - 145e75495359
794 | |
794 | |
795 o | 5 public n-A - d6bcb4f74035
795 o | 5 public n-A - d6bcb4f74035
796 | |
796 | |
797 | o 4 public a-D - b555f63b6063
797 | o 4 public a-D - b555f63b6063
798 | |
798 | |
799 | o 3 public a-C - 54acac6f23ab
799 | o 3 public a-C - 54acac6f23ab
800 | |
800 | |
801 o | 2 public b-A - f54f1bb90ff3
801 o | 2 public b-A - f54f1bb90ff3
802 |/
802 |/
803 o 1 public a-B - 548a3d25dbf0
803 o 1 public a-B - 548a3d25dbf0
804 |
804 |
805 o 0 public a-A - 054250a37db4
805 o 0 public a-A - 054250a37db4
806
806
807 $ hgph -R ../alpha # a-H should have been synced to 0
807 $ hgph -R ../alpha # a-H should have been synced to 0
808 @ 10 public a-H - 967b449fbc94
808 @ 10 public a-H - 967b449fbc94
809 |
809 |
810 | o 9 draft a-G - 3e27b6f1eee1
810 | o 9 draft a-G - 3e27b6f1eee1
811 | |
811 | |
812 | o 8 public a-F - b740e3e5c05d
812 | o 8 public a-F - b740e3e5c05d
813 | |
813 | |
814 | o 7 public a-E - e9f537e46dea
814 | o 7 public a-E - e9f537e46dea
815 | |
815 | |
816 +---o 6 public n-B - 145e75495359
816 +---o 6 public n-B - 145e75495359
817 | |
817 | |
818 o | 5 public n-A - d6bcb4f74035
818 o | 5 public n-A - d6bcb4f74035
819 | |
819 | |
820 o | 4 public b-A - f54f1bb90ff3
820 o | 4 public b-A - f54f1bb90ff3
821 | |
821 | |
822 | o 3 public a-D - b555f63b6063
822 | o 3 public a-D - b555f63b6063
823 | |
823 | |
824 | o 2 public a-C - 54acac6f23ab
824 | o 2 public a-C - 54acac6f23ab
825 |/
825 |/
826 o 1 public a-B - 548a3d25dbf0
826 o 1 public a-B - 548a3d25dbf0
827 |
827 |
828 o 0 public a-A - 054250a37db4
828 o 0 public a-A - 054250a37db4
829
829
830
830
831
831
832 Bare push with next changeset and common changeset needing sync (issue3575)
832 Bare push with next changeset and common changeset needing sync (issue3575)
833
833
834 (reset some state on the remote repo to avoid confusing other tests)
834 (reset some state on the remote repo to avoid confusing other tests)
835
835
836 $ hg -R ../alpha --config extensions.strip= strip --no-backup 967b449fbc94
836 $ hg -R ../alpha --config extensions.strip= strip --no-backup 967b449fbc94
837 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
837 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
838 $ hg phase --force --draft b740e3e5c05d 967b449fbc94
838 $ hg phase --force --draft b740e3e5c05d 967b449fbc94
839 test-debug-phase: move rev 8: 0 -> 1
839 test-debug-phase: move rev 8: 0 -> 1
840 test-debug-phase: move rev 9: 0 -> 1
840 test-debug-phase: move rev 9: 0 -> 1
841 $ hg push -fv ../alpha
841 $ hg push -fv ../alpha
842 pushing to ../alpha
842 pushing to ../alpha
843 searching for changes
843 searching for changes
844 1 changesets found
844 1 changesets found
845 uncompressed size of bundle content:
845 uncompressed size of bundle content:
846 178 (changelog)
846 178 (changelog)
847 165 (manifests)
847 165 (manifests)
848 131 a-H
848 131 a-H
849 adding changesets
849 adding changesets
850 adding manifests
850 adding manifests
851 adding file changes
851 adding file changes
852 added 1 changesets with 1 changes to 1 files (+1 heads)
852 added 1 changesets with 1 changes to 1 files (+1 heads)
853 test-debug-phase: new rev 10: x -> 0
853 test-debug-phase: new rev 10: x -> 0
854 test-debug-phase: move rev 8: 1 -> 0
854 test-debug-phase: move rev 8: 1 -> 0
855 test-debug-phase: move rev 9: 1 -> 0
855 test-debug-phase: move rev 9: 1 -> 0
856 $ hgph
856 $ hgph
857 o 9 public a-H - 967b449fbc94
857 o 9 public a-H - 967b449fbc94
858 |
858 |
859 | o 8 public a-F - b740e3e5c05d
859 | o 8 public a-F - b740e3e5c05d
860 | |
860 | |
861 | o 7 public a-E - e9f537e46dea
861 | o 7 public a-E - e9f537e46dea
862 | |
862 | |
863 +---o 6 public n-B - 145e75495359
863 +---o 6 public n-B - 145e75495359
864 | |
864 | |
865 o | 5 public n-A - d6bcb4f74035
865 o | 5 public n-A - d6bcb4f74035
866 | |
866 | |
867 | o 4 public a-D - b555f63b6063
867 | o 4 public a-D - b555f63b6063
868 | |
868 | |
869 | o 3 public a-C - 54acac6f23ab
869 | o 3 public a-C - 54acac6f23ab
870 | |
870 | |
871 o | 2 public b-A - f54f1bb90ff3
871 o | 2 public b-A - f54f1bb90ff3
872 |/
872 |/
873 o 1 public a-B - 548a3d25dbf0
873 o 1 public a-B - 548a3d25dbf0
874 |
874 |
875 o 0 public a-A - 054250a37db4
875 o 0 public a-A - 054250a37db4
876
876
877
877
878 $ hg -R ../alpha update 967b449fbc94 #for later test consistency
878 $ hg -R ../alpha update 967b449fbc94 #for later test consistency
879 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
879 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
880 $ hgph -R ../alpha
880 $ hgph -R ../alpha
881 @ 10 public a-H - 967b449fbc94
881 @ 10 public a-H - 967b449fbc94
882 |
882 |
883 | o 9 draft a-G - 3e27b6f1eee1
883 | o 9 draft a-G - 3e27b6f1eee1
884 | |
884 | |
885 | o 8 public a-F - b740e3e5c05d
885 | o 8 public a-F - b740e3e5c05d
886 | |
886 | |
887 | o 7 public a-E - e9f537e46dea
887 | o 7 public a-E - e9f537e46dea
888 | |
888 | |
889 +---o 6 public n-B - 145e75495359
889 +---o 6 public n-B - 145e75495359
890 | |
890 | |
891 o | 5 public n-A - d6bcb4f74035
891 o | 5 public n-A - d6bcb4f74035
892 | |
892 | |
893 o | 4 public b-A - f54f1bb90ff3
893 o | 4 public b-A - f54f1bb90ff3
894 | |
894 | |
895 | o 3 public a-D - b555f63b6063
895 | o 3 public a-D - b555f63b6063
896 | |
896 | |
897 | o 2 public a-C - 54acac6f23ab
897 | o 2 public a-C - 54acac6f23ab
898 |/
898 |/
899 o 1 public a-B - 548a3d25dbf0
899 o 1 public a-B - 548a3d25dbf0
900 |
900 |
901 o 0 public a-A - 054250a37db4
901 o 0 public a-A - 054250a37db4
902
902
903
903
904 Discovering a locally secret changeset on a remote repository:
904 Discovering a locally secret changeset on a remote repository:
905
905
906 - should make it non-secret
906 - should make it non-secret
907
907
908 $ cd ../alpha
908 $ cd ../alpha
909 $ mkcommit A-secret --config phases.new-commit=2
909 $ mkcommit A-secret --config phases.new-commit=2
910 test-debug-phase: new rev 11: x -> 2
910 test-debug-phase: new rev 11: x -> 2
911 $ hgph
911 $ hgph
912 @ 11 secret A-secret - 435b5d83910c
912 @ 11 secret A-secret - 435b5d83910c
913 |
913 |
914 o 10 public a-H - 967b449fbc94
914 o 10 public a-H - 967b449fbc94
915 |
915 |
916 | o 9 draft a-G - 3e27b6f1eee1
916 | o 9 draft a-G - 3e27b6f1eee1
917 | |
917 | |
918 | o 8 public a-F - b740e3e5c05d
918 | o 8 public a-F - b740e3e5c05d
919 | |
919 | |
920 | o 7 public a-E - e9f537e46dea
920 | o 7 public a-E - e9f537e46dea
921 | |
921 | |
922 +---o 6 public n-B - 145e75495359
922 +---o 6 public n-B - 145e75495359
923 | |
923 | |
924 o | 5 public n-A - d6bcb4f74035
924 o | 5 public n-A - d6bcb4f74035
925 | |
925 | |
926 o | 4 public b-A - f54f1bb90ff3
926 o | 4 public b-A - f54f1bb90ff3
927 | |
927 | |
928 | o 3 public a-D - b555f63b6063
928 | o 3 public a-D - b555f63b6063
929 | |
929 | |
930 | o 2 public a-C - 54acac6f23ab
930 | o 2 public a-C - 54acac6f23ab
931 |/
931 |/
932 o 1 public a-B - 548a3d25dbf0
932 o 1 public a-B - 548a3d25dbf0
933 |
933 |
934 o 0 public a-A - 054250a37db4
934 o 0 public a-A - 054250a37db4
935
935
936 $ hg bundle --base 'parents(.)' -r . ../secret-bundle.hg
936 $ hg bundle --base 'parents(.)' -r . ../secret-bundle.hg
937 1 changesets found
937 1 changesets found
938 $ hg -R ../mu unbundle ../secret-bundle.hg
938 $ hg -R ../mu unbundle ../secret-bundle.hg
939 adding changesets
939 adding changesets
940 adding manifests
940 adding manifests
941 adding file changes
941 adding file changes
942 added 1 changesets with 1 changes to 1 files
942 added 1 changesets with 1 changes to 1 files
943 new changesets 435b5d83910c
943 new changesets 435b5d83910c
944 test-debug-phase: new rev 10: x -> 1
944 test-debug-phase: new rev 10: x -> 1
945 (run 'hg update' to get a working copy)
945 (run 'hg update' to get a working copy)
946 $ hgph -R ../mu
946 $ hgph -R ../mu
947 o 10 draft A-secret - 435b5d83910c
947 o 10 draft A-secret - 435b5d83910c
948 |
948 |
949 o 9 public a-H - 967b449fbc94
949 o 9 public a-H - 967b449fbc94
950 |
950 |
951 | o 8 public a-F - b740e3e5c05d
951 | o 8 public a-F - b740e3e5c05d
952 | |
952 | |
953 | o 7 public a-E - e9f537e46dea
953 | o 7 public a-E - e9f537e46dea
954 | |
954 | |
955 +---o 6 public n-B - 145e75495359
955 +---o 6 public n-B - 145e75495359
956 | |
956 | |
957 o | 5 public n-A - d6bcb4f74035
957 o | 5 public n-A - d6bcb4f74035
958 | |
958 | |
959 | o 4 public a-D - b555f63b6063
959 | o 4 public a-D - b555f63b6063
960 | |
960 | |
961 | o 3 public a-C - 54acac6f23ab
961 | o 3 public a-C - 54acac6f23ab
962 | |
962 | |
963 o | 2 public b-A - f54f1bb90ff3
963 o | 2 public b-A - f54f1bb90ff3
964 |/
964 |/
965 o 1 public a-B - 548a3d25dbf0
965 o 1 public a-B - 548a3d25dbf0
966 |
966 |
967 o 0 public a-A - 054250a37db4
967 o 0 public a-A - 054250a37db4
968
968
969 $ hg pull ../mu
969 $ hg pull ../mu
970 pulling from ../mu
970 pulling from ../mu
971 searching for changes
971 searching for changes
972 no changes found
972 no changes found
973 test-debug-phase: move rev 11: 2 -> 1
973 test-debug-phase: move rev 11: 2 -> 1
974 $ hgph
974 $ hgph
975 @ 11 draft A-secret - 435b5d83910c
975 @ 11 draft A-secret - 435b5d83910c
976 |
976 |
977 o 10 public a-H - 967b449fbc94
977 o 10 public a-H - 967b449fbc94
978 |
978 |
979 | o 9 draft a-G - 3e27b6f1eee1
979 | o 9 draft a-G - 3e27b6f1eee1
980 | |
980 | |
981 | o 8 public a-F - b740e3e5c05d
981 | o 8 public a-F - b740e3e5c05d
982 | |
982 | |
983 | o 7 public a-E - e9f537e46dea
983 | o 7 public a-E - e9f537e46dea
984 | |
984 | |
985 +---o 6 public n-B - 145e75495359
985 +---o 6 public n-B - 145e75495359
986 | |
986 | |
987 o | 5 public n-A - d6bcb4f74035
987 o | 5 public n-A - d6bcb4f74035
988 | |
988 | |
989 o | 4 public b-A - f54f1bb90ff3
989 o | 4 public b-A - f54f1bb90ff3
990 | |
990 | |
991 | o 3 public a-D - b555f63b6063
991 | o 3 public a-D - b555f63b6063
992 | |
992 | |
993 | o 2 public a-C - 54acac6f23ab
993 | o 2 public a-C - 54acac6f23ab
994 |/
994 |/
995 o 1 public a-B - 548a3d25dbf0
995 o 1 public a-B - 548a3d25dbf0
996 |
996 |
997 o 0 public a-A - 054250a37db4
997 o 0 public a-A - 054250a37db4
998
998
999
999
1000 pushing locally public and draft changesets that are remotely secret should make them
1000 pushing locally public and draft changesets that are remotely secret should make them
1001 appear on the remote side.
1001 appear on the remote side.
1002
1002
1003 $ hg -R ../mu phase --secret --force 967b449fbc94
1003 $ hg -R ../mu phase --secret --force 967b449fbc94
1004 test-debug-phase: move rev 9: 0 -> 2
1004 test-debug-phase: move rev 9: 0 -> 2
1005 test-debug-phase: move rev 10: 1 -> 2
1005 test-debug-phase: move rev 10: 1 -> 2
1006 $ hg push -r 435b5d83910c ../mu
1006 $ hg push -r 435b5d83910c ../mu
1007 pushing to ../mu
1007 pushing to ../mu
1008 searching for changes
1008 searching for changes
1009 abort: push creates new remote head 435b5d83910c!
1009 abort: push creates new remote head 435b5d83910c!
1010 (merge or see 'hg help push' for details about pushing new heads)
1010 (merge or see 'hg help push' for details about pushing new heads)
1011 [255]
1011 [255]
1012 $ hg push -fr 435b5d83910c ../mu # because the push will create new visible head
1012 $ hg push -fr 435b5d83910c ../mu # because the push will create new visible head
1013 pushing to ../mu
1013 pushing to ../mu
1014 searching for changes
1014 searching for changes
1015 adding changesets
1015 adding changesets
1016 adding manifests
1016 adding manifests
1017 adding file changes
1017 adding file changes
1018 added 0 changesets with 0 changes to 2 files
1018 added 0 changesets with 0 changes to 2 files
1019 test-debug-phase: move rev 9: 2 -> 0
1019 test-debug-phase: move rev 9: 2 -> 0
1020 test-debug-phase: move rev 10: 2 -> 1
1020 test-debug-phase: move rev 10: 2 -> 1
1021 $ hgph -R ../mu
1021 $ hgph -R ../mu
1022 o 10 draft A-secret - 435b5d83910c
1022 o 10 draft A-secret - 435b5d83910c
1023 |
1023 |
1024 o 9 public a-H - 967b449fbc94
1024 o 9 public a-H - 967b449fbc94
1025 |
1025 |
1026 | o 8 public a-F - b740e3e5c05d
1026 | o 8 public a-F - b740e3e5c05d
1027 | |
1027 | |
1028 | o 7 public a-E - e9f537e46dea
1028 | o 7 public a-E - e9f537e46dea
1029 | |
1029 | |
1030 +---o 6 public n-B - 145e75495359
1030 +---o 6 public n-B - 145e75495359
1031 | |
1031 | |
1032 o | 5 public n-A - d6bcb4f74035
1032 o | 5 public n-A - d6bcb4f74035
1033 | |
1033 | |
1034 | o 4 public a-D - b555f63b6063
1034 | o 4 public a-D - b555f63b6063
1035 | |
1035 | |
1036 | o 3 public a-C - 54acac6f23ab
1036 | o 3 public a-C - 54acac6f23ab
1037 | |
1037 | |
1038 o | 2 public b-A - f54f1bb90ff3
1038 o | 2 public b-A - f54f1bb90ff3
1039 |/
1039 |/
1040 o 1 public a-B - 548a3d25dbf0
1040 o 1 public a-B - 548a3d25dbf0
1041 |
1041 |
1042 o 0 public a-A - 054250a37db4
1042 o 0 public a-A - 054250a37db4
1043
1043
1044
1044
1045 pull a new changeset with a common draft locally
1045 pull a new changeset with a common draft locally
1046
1046
1047 $ hg up -q 967b449fbc94 # create a new root for draft
1047 $ hg up -q 967b449fbc94 # create a new root for draft
1048 $ mkcommit 'alpha-more'
1048 $ mkcommit 'alpha-more'
1049 test-debug-phase: new rev 12: x -> 1
1049 test-debug-phase: new rev 12: x -> 1
1050 created new head
1050 created new head
1051 $ hg push -fr . ../mu
1051 $ hg push -fr . ../mu
1052 pushing to ../mu
1052 pushing to ../mu
1053 searching for changes
1053 searching for changes
1054 adding changesets
1054 adding changesets
1055 adding manifests
1055 adding manifests
1056 adding file changes
1056 adding file changes
1057 added 1 changesets with 1 changes to 1 files (+1 heads)
1057 added 1 changesets with 1 changes to 1 files (+1 heads)
1058 test-debug-phase: new rev 11: x -> 1
1058 test-debug-phase: new rev 11: x -> 1
1059 $ cd ../mu
1059 $ cd ../mu
1060 $ hg phase --secret --force 1c5cfd894796
1060 $ hg phase --secret --force 1c5cfd894796
1061 test-debug-phase: move rev 11: 1 -> 2
1061 test-debug-phase: move rev 11: 1 -> 2
1062 $ hg up -q 435b5d83910c
1062 $ hg up -q 435b5d83910c
1063 $ mkcommit 'mu-more'
1063 $ mkcommit 'mu-more'
1064 test-debug-phase: new rev 12: x -> 1
1064 test-debug-phase: new rev 12: x -> 1
1065 $ cd ../alpha
1065 $ cd ../alpha
1066 $ hg pull ../mu
1066 $ hg pull ../mu
1067 pulling from ../mu
1067 pulling from ../mu
1068 searching for changes
1068 searching for changes
1069 adding changesets
1069 adding changesets
1070 adding manifests
1070 adding manifests
1071 adding file changes
1071 adding file changes
1072 added 1 changesets with 1 changes to 1 files
1072 added 1 changesets with 1 changes to 1 files
1073 new changesets 5237fb433fc8
1073 new changesets 5237fb433fc8
1074 test-debug-phase: new rev 13: x -> 1
1074 test-debug-phase: new rev 13: x -> 1
1075 (run 'hg update' to get a working copy)
1075 (run 'hg update' to get a working copy)
1076 $ hgph
1076 $ hgph
1077 o 13 draft mu-more - 5237fb433fc8
1077 o 13 draft mu-more - 5237fb433fc8
1078 |
1078 |
1079 | @ 12 draft alpha-more - 1c5cfd894796
1079 | @ 12 draft alpha-more - 1c5cfd894796
1080 | |
1080 | |
1081 o | 11 draft A-secret - 435b5d83910c
1081 o | 11 draft A-secret - 435b5d83910c
1082 |/
1082 |/
1083 o 10 public a-H - 967b449fbc94
1083 o 10 public a-H - 967b449fbc94
1084 |
1084 |
1085 | o 9 draft a-G - 3e27b6f1eee1
1085 | o 9 draft a-G - 3e27b6f1eee1
1086 | |
1086 | |
1087 | o 8 public a-F - b740e3e5c05d
1087 | o 8 public a-F - b740e3e5c05d
1088 | |
1088 | |
1089 | o 7 public a-E - e9f537e46dea
1089 | o 7 public a-E - e9f537e46dea
1090 | |
1090 | |
1091 +---o 6 public n-B - 145e75495359
1091 +---o 6 public n-B - 145e75495359
1092 | |
1092 | |
1093 o | 5 public n-A - d6bcb4f74035
1093 o | 5 public n-A - d6bcb4f74035
1094 | |
1094 | |
1095 o | 4 public b-A - f54f1bb90ff3
1095 o | 4 public b-A - f54f1bb90ff3
1096 | |
1096 | |
1097 | o 3 public a-D - b555f63b6063
1097 | o 3 public a-D - b555f63b6063
1098 | |
1098 | |
1099 | o 2 public a-C - 54acac6f23ab
1099 | o 2 public a-C - 54acac6f23ab
1100 |/
1100 |/
1101 o 1 public a-B - 548a3d25dbf0
1101 o 1 public a-B - 548a3d25dbf0
1102 |
1102 |
1103 o 0 public a-A - 054250a37db4
1103 o 0 public a-A - 054250a37db4
1104
1104
1105
1105
1106 Test that changesets secret on the remote are properly ignored even when they exist locally
1106 Test that changesets secret on the remote are properly ignored even when they exist locally
1107
1107
1108 $ cd ..
1108 $ cd ..
1109 $ hg clone -qU -r b555f63b6063 -r f54f1bb90ff3 beta gamma
1109 $ hg clone -qU -r b555f63b6063 -r f54f1bb90ff3 beta gamma
1110 test-debug-phase: new rev 0: x -> 0
1110 test-debug-phase: new rev 0: x -> 0
1111 test-debug-phase: new rev 1: x -> 0
1111 test-debug-phase: new rev 1: x -> 0
1112 test-debug-phase: new rev 2: x -> 0
1112 test-debug-phase: new rev 2: x -> 0
1113 test-debug-phase: new rev 3: x -> 0
1113 test-debug-phase: new rev 3: x -> 0
1114 test-debug-phase: new rev 4: x -> 0
1114 test-debug-phase: new rev 4: x -> 0
1115
1115
1116 # pathological cases are
1116 # pathological cases are
1117 #
1117 #
1118 # * secret remotely
1118 # * secret remotely
1119 # * known locally
1119 # * known locally
1120 # * repos have uncommon changesets
1120 # * repos have uncommon changesets
1121
1121
1122 $ hg -R beta phase --secret --force f54f1bb90ff3
1122 $ hg -R beta phase --secret --force f54f1bb90ff3
1123 test-debug-phase: move rev 2: 0 -> 2
1123 test-debug-phase: move rev 2: 0 -> 2
1124 $ hg -R gamma phase --draft --force f54f1bb90ff3
1124 $ hg -R gamma phase --draft --force f54f1bb90ff3
1125 test-debug-phase: move rev 2: 0 -> 1
1125 test-debug-phase: move rev 2: 0 -> 1
1126
1126
1127 $ cd gamma
1127 $ cd gamma
1128 $ hg pull ../beta
1128 $ hg pull ../beta
1129 pulling from ../beta
1129 pulling from ../beta
1130 searching for changes
1130 searching for changes
1131 adding changesets
1131 adding changesets
1132 adding manifests
1132 adding manifests
1133 adding file changes
1133 adding file changes
1134 added 2 changesets with 2 changes to 2 files
1134 added 2 changesets with 2 changes to 2 files
1135 new changesets e9f537e46dea:b740e3e5c05d
1135 new changesets e9f537e46dea:b740e3e5c05d
1136 test-debug-phase: new rev 5: x -> 0
1136 test-debug-phase: new rev 5: x -> 0
1137 test-debug-phase: new rev 6: x -> 0
1137 test-debug-phase: new rev 6: x -> 0
1138 (run 'hg update' to get a working copy)
1138 (run 'hg update' to get a working copy)
1139 $ hg phase f54f1bb90ff3
1139 $ hg phase f54f1bb90ff3
1140 2: draft
1140 2: draft
1141
1141
1142 same over the wire
1142 same over the wire
1143
1143
1144 $ cd ../beta
1144 $ cd ../beta
1145 $ hg serve -p $HGPORT -d --pid-file=../beta.pid -E ../beta-error.log
1145 $ hg serve -p $HGPORT -d --pid-file=../beta.pid -E ../beta-error.log
1146 $ cat ../beta.pid >> $DAEMON_PIDS
1146 $ cat ../beta.pid >> $DAEMON_PIDS
1147 $ cd ../gamma
1147 $ cd ../gamma
1148
1148
1149 $ hg pull http://localhost:$HGPORT/ # bundle2+
1149 $ hg pull http://localhost:$HGPORT/ # bundle2+
1150 pulling from http://localhost:$HGPORT/
1150 pulling from http://localhost:$HGPORT/
1151 searching for changes
1151 searching for changes
1152 no changes found
1152 no changes found
1153 $ hg phase f54f1bb90ff3
1153 $ hg phase f54f1bb90ff3
1154 2: draft
1154 2: draft
1155
1155
1156 enforce bundle1
1156 enforce bundle1
1157
1157
1158 $ hg pull http://localhost:$HGPORT/ --config devel.legacy.exchange=bundle1
1158 $ hg pull http://localhost:$HGPORT/ --config devel.legacy.exchange=bundle1
1159 pulling from http://localhost:$HGPORT/
1159 pulling from http://localhost:$HGPORT/
1160 searching for changes
1160 searching for changes
1161 no changes found
1161 no changes found
1162 $ hg phase f54f1bb90ff3
1162 $ hg phase f54f1bb90ff3
1163 2: draft
1163 2: draft
1164
1164
1165 check that changesets secret locally on both sides are not synced to public
1165 check that changesets secret locally on both sides are not synced to public
1166
1166
1167 $ hg push -r b555f63b6063 http://localhost:$HGPORT/
1167 $ hg push -r b555f63b6063 http://localhost:$HGPORT/
1168 pushing to http://localhost:$HGPORT/
1168 pushing to http://localhost:$HGPORT/
1169 searching for changes
1169 searching for changes
1170 no changes found
1170 no changes found
1171 [1]
1171 [1]
1172 $ hg phase f54f1bb90ff3
1172 $ hg phase f54f1bb90ff3
1173 2: draft
1173 2: draft
1174
1174
1175 put the changeset in the draft state again
1175 put the changeset in the draft state again
1176 (the first test after this one expects to be able to copy)
1176 (the first test after this one expects to be able to copy)
1177
1177
1178 $ cd ..
1178 $ cd ..
1179
1179
1180
1180
1181 Test Clone behavior
1181 Test Clone behavior
1182
1182
1183 A. Clone without secret changeset
1183 A. Clone without secret changeset
1184
1184
1185 1. cloning non-publishing repository
1185 1. cloning non-publishing repository
1186 (Phases should be preserved)
1186 (Phases should be preserved)
1187
1187
1188 # make sure there is no secret so we can use a copy clone
1188 # make sure there is no secret so we can use a copy clone
1189
1189
1190 $ hg -R mu phase --draft 'secret()'
1190 $ hg -R mu phase --draft 'secret()'
1191 test-debug-phase: move rev 11: 2 -> 1
1191 test-debug-phase: move rev 11: 2 -> 1
1192
1192
1193 $ hg clone -U mu Tau
1193 $ hg clone -U mu Tau
1194 $ hgph -R Tau
1194 $ hgph -R Tau
1195 o 12 draft mu-more - 5237fb433fc8
1195 o 12 draft mu-more - 5237fb433fc8
1196 |
1196 |
1197 | o 11 draft alpha-more - 1c5cfd894796
1197 | o 11 draft alpha-more - 1c5cfd894796
1198 | |
1198 | |
1199 o | 10 draft A-secret - 435b5d83910c
1199 o | 10 draft A-secret - 435b5d83910c
1200 |/
1200 |/
1201 o 9 public a-H - 967b449fbc94
1201 o 9 public a-H - 967b449fbc94
1202 |
1202 |
1203 | o 8 public a-F - b740e3e5c05d
1203 | o 8 public a-F - b740e3e5c05d
1204 | |
1204 | |
1205 | o 7 public a-E - e9f537e46dea
1205 | o 7 public a-E - e9f537e46dea
1206 | |
1206 | |
1207 +---o 6 public n-B - 145e75495359
1207 +---o 6 public n-B - 145e75495359
1208 | |
1208 | |
1209 o | 5 public n-A - d6bcb4f74035
1209 o | 5 public n-A - d6bcb4f74035
1210 | |
1210 | |
1211 | o 4 public a-D - b555f63b6063
1211 | o 4 public a-D - b555f63b6063
1212 | |
1212 | |
1213 | o 3 public a-C - 54acac6f23ab
1213 | o 3 public a-C - 54acac6f23ab
1214 | |
1214 | |
1215 o | 2 public b-A - f54f1bb90ff3
1215 o | 2 public b-A - f54f1bb90ff3
1216 |/
1216 |/
1217 o 1 public a-B - 548a3d25dbf0
1217 o 1 public a-B - 548a3d25dbf0
1218 |
1218 |
1219 o 0 public a-A - 054250a37db4
1219 o 0 public a-A - 054250a37db4
1220
1220
1221
1221
1222 2. cloning a publishing repository
1222 2. cloning a publishing repository
1223
1223
1224 (everything should be public)
1224 (everything should be public)
1225
1225
1226 $ hg clone -U alpha Upsilon
1226 $ hg clone -U alpha Upsilon
1227 $ hgph -R Upsilon
1227 $ hgph -R Upsilon
1228 o 13 public mu-more - 5237fb433fc8
1228 o 13 public mu-more - 5237fb433fc8
1229 |
1229 |
1230 | o 12 public alpha-more - 1c5cfd894796
1230 | o 12 public alpha-more - 1c5cfd894796
1231 | |
1231 | |
1232 o | 11 public A-secret - 435b5d83910c
1232 o | 11 public A-secret - 435b5d83910c
1233 |/
1233 |/
1234 o 10 public a-H - 967b449fbc94
1234 o 10 public a-H - 967b449fbc94
1235 |
1235 |
1236 | o 9 public a-G - 3e27b6f1eee1
1236 | o 9 public a-G - 3e27b6f1eee1
1237 | |
1237 | |
1238 | o 8 public a-F - b740e3e5c05d
1238 | o 8 public a-F - b740e3e5c05d
1239 | |
1239 | |
1240 | o 7 public a-E - e9f537e46dea
1240 | o 7 public a-E - e9f537e46dea
1241 | |
1241 | |
1242 +---o 6 public n-B - 145e75495359
1242 +---o 6 public n-B - 145e75495359
1243 | |
1243 | |
1244 o | 5 public n-A - d6bcb4f74035
1244 o | 5 public n-A - d6bcb4f74035
1245 | |
1245 | |
1246 o | 4 public b-A - f54f1bb90ff3
1246 o | 4 public b-A - f54f1bb90ff3
1247 | |
1247 | |
1248 | o 3 public a-D - b555f63b6063
1248 | o 3 public a-D - b555f63b6063
1249 | |
1249 | |
1250 | o 2 public a-C - 54acac6f23ab
1250 | o 2 public a-C - 54acac6f23ab
1251 |/
1251 |/
1252 o 1 public a-B - 548a3d25dbf0
1252 o 1 public a-B - 548a3d25dbf0
1253 |
1253 |
1254 o 0 public a-A - 054250a37db4
1254 o 0 public a-A - 054250a37db4
1255
1255
1256 #if unix-permissions no-root
1256 #if unix-permissions no-root
1257
1257
1258 Pushing from an unlockable repo
1258 Pushing from an unlockable repo
1259 --------------------------------
1259 --------------------------------
1260 (issue3684)
1260 (issue3684)
1261
1261
1262 Inability to lock the source repo should not prevent the push. It will only
1262 Inability to lock the source repo should not prevent the push. It will only
1263 prevent the retrieval of remote phases during the push. For example, pushing
1263 prevent the retrieval of remote phases during the push. For example, pushing
1264 to a publishing server won't turn changesets public.
1264 to a publishing server won't turn changesets public.
1265
1265
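In other words, the lock on the source repository is treated as optional for
push. The following is a minimal, self-contained sketch of that pattern; the
function and helper names are purely illustrative and are not Mercurial's
actual exchange.py code.

  # Illustrative sketch only (hypothetical names, not the real push code):
  # failing to lock the source repo skips the local phase update but never
  # aborts the push itself.
  def push(take_lock, send_changes, sync_phases, warn):
      lock = None
      try:
          lock = take_lock()   # e.g. fails with EACCES after chmod -R -w .hg
      except OSError:
          warn('cannot lock source repo, skipping local public phase update')
      send_changes()           # the push itself still happens
      if lock is not None:
          sync_phases()        # local phases move to public only when locked

  if __name__ == '__main__':
      def failing_lock():
          raise OSError(13, 'Permission denied')
      push(failing_lock,
           lambda: print('changes sent'),
           lambda: print('phases synced'),
           print)

Running the sketch prints the warning and "changes sent" but never "phases
synced", which mirrors the two test cases below.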
1266 1. Test that push is not prevented
1266 1. Test that push is not prevented
1267
1267
1268 $ hg init Phi
1268 $ hg init Phi
1269 $ cd Upsilon
1269 $ cd Upsilon
1270 $ chmod -R -w .hg
1270 $ chmod -R -w .hg
1271 $ hg push ../Phi
1271 $ hg push ../Phi
1272 pushing to ../Phi
1272 pushing to ../Phi
1273 searching for changes
1273 searching for changes
1274 adding changesets
1274 adding changesets
1275 adding manifests
1275 adding manifests
1276 adding file changes
1276 adding file changes
1277 added 14 changesets with 14 changes to 14 files (+3 heads)
1277 added 14 changesets with 14 changes to 14 files (+3 heads)
1278 test-debug-phase: new rev 0: x -> 0
1278 test-debug-phase: new rev 0: x -> 0
1279 test-debug-phase: new rev 1: x -> 0
1279 test-debug-phase: new rev 1: x -> 0
1280 test-debug-phase: new rev 2: x -> 0
1280 test-debug-phase: new rev 2: x -> 0
1281 test-debug-phase: new rev 3: x -> 0
1281 test-debug-phase: new rev 3: x -> 0
1282 test-debug-phase: new rev 4: x -> 0
1282 test-debug-phase: new rev 4: x -> 0
1283 test-debug-phase: new rev 5: x -> 0
1283 test-debug-phase: new rev 5: x -> 0
1284 test-debug-phase: new rev 6: x -> 0
1284 test-debug-phase: new rev 6: x -> 0
1285 test-debug-phase: new rev 7: x -> 0
1285 test-debug-phase: new rev 7: x -> 0
1286 test-debug-phase: new rev 8: x -> 0
1286 test-debug-phase: new rev 8: x -> 0
1287 test-debug-phase: new rev 9: x -> 0
1287 test-debug-phase: new rev 9: x -> 0
1288 test-debug-phase: new rev 10: x -> 0
1288 test-debug-phase: new rev 10: x -> 0
1289 test-debug-phase: new rev 11: x -> 0
1289 test-debug-phase: new rev 11: x -> 0
1290 test-debug-phase: new rev 12: x -> 0
1290 test-debug-phase: new rev 12: x -> 0
1291 test-debug-phase: new rev 13: x -> 0
1291 test-debug-phase: new rev 13: x -> 0
1292 $ chmod -R +w .hg
1292 $ chmod -R +w .hg
1293
1293
1294 2. Test that failed phase movements are reported
1294 2. Test that failed phase movements are reported
1295
1295
1296 $ hg phase --force --draft 3
1296 $ hg phase --force --draft 3
1297 test-debug-phase: move rev 3: 0 -> 1
1297 test-debug-phase: move rev 3: 0 -> 1
1298 test-debug-phase: move rev 7: 0 -> 1
1298 test-debug-phase: move rev 7: 0 -> 1
1299 test-debug-phase: move rev 8: 0 -> 1
1299 test-debug-phase: move rev 8: 0 -> 1
1300 test-debug-phase: move rev 9: 0 -> 1
1300 test-debug-phase: move rev 9: 0 -> 1
1301 $ chmod -R -w .hg
1301 $ chmod -R -w .hg
1302 $ hg push ../Phi
1302 $ hg push ../Phi
1303 pushing to ../Phi
1303 pushing to ../Phi
1304 searching for changes
1304 searching for changes
1305 no changes found
1305 no changes found
1306 cannot lock source repo, skipping local public phase update
1306 cannot lock source repo, skipping local public phase update
1307 [1]
1307 [1]
1308 $ chmod -R +w .hg
1308 $ chmod -R +w .hg
1309
1309
1310 3. Test that push is prevented if the lock was already acquired (not a
1311 permission error, but EEXIST)
1312
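This new case exercises the issue5882 refinement: only a lock failure whose
errno is EEXIST (the lock file already exists, i.e. another process really
holds the lock) should make the push wait and eventually abort; any other
failure falls back to the unlocked behaviour tested above. A hedged sketch of
that decision, again with made-up names rather than Mercurial's internals:

  import errno

  def handle_lock_failure(err, warn):
      """Continue unlocked, unless the lock is genuinely held by someone."""
      if getattr(err, 'errno', None) == errno.EEXIST:
          raise err      # lock already taken: wait for it, abort on timeout
      warn('cannot lock source repo, skipping local public phase update')

  if __name__ == '__main__':
      # permission problem: push continues without the lock
      handle_lock_failure(OSError(errno.EACCES, 'Permission denied'), print)
      # lock file exists: the error propagates and the push would abort
      try:
          handle_lock_failure(OSError(errno.EEXIST, 'File exists'), print)
      except OSError as exc:
          print('would abort the push:', exc)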
1313 $ touch .hg/store/lock
1314 $ hg push ../Phi --config ui.timeout=1
1315 pushing to ../Phi
1316 waiting for lock on repository $TESTTMP/Upsilon held by ''
1317 abort: repository $TESTTMP/Upsilon: timed out waiting for lock held by ''
1318 (lock might be very busy)
1319 [255]
1320 $ rm .hg/store/lock
1321
1310 $ cd ..
1322 $ cd ..
1311
1323
1312 #endif
1324 #endif
1313
1325
1314 Test that clone behaves like pull and doesn't publish changesets as a plain
1326 Test that clone behaves like pull and doesn't publish changesets as a plain
1315 push does. The conditional output accounts for changes in the conditional
1327 push does. The conditional output accounts for changes in the conditional
1316 block above.
1328 block above.
1317
1329
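As a rule of thumb (a simplification for illustration, not Mercurial's actual
phase-exchange code): a plain push to a publishing server turns the pushed
changesets public in the source repository as well, whereas being cloned from,
like being pulled from, leaves the source repository's phases untouched. A
tiny sketch of that rule:

  def source_phase_after_push(phase, remote_is_publishing):
      # pushing to a publishing repo marks the pushed changesets public locally
      return 'public' if remote_is_publishing else phase

  def source_phase_after_clone(phase):
      # being cloned (or pulled) from never changes the source repo's phases
      return phase

  if __name__ == '__main__':
      print(source_phase_after_push('draft', True))   # -> public
      print(source_phase_after_clone('draft'))        # -> draft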
1318 #if unix-permissions no-root
1330 #if unix-permissions no-root
1319 $ hg -R Upsilon phase -q --force --draft 2
1331 $ hg -R Upsilon phase -q --force --draft 2
1320 test-debug-phase: move rev 2: 0 -> 1
1332 test-debug-phase: move rev 2: 0 -> 1
1321 #else
1333 #else
1322 $ hg -R Upsilon phase -q --force --draft 2
1334 $ hg -R Upsilon phase -q --force --draft 2
1323 test-debug-phase: move rev 2: 0 -> 1
1335 test-debug-phase: move rev 2: 0 -> 1
1324 test-debug-phase: move rev 3: 0 -> 1
1336 test-debug-phase: move rev 3: 0 -> 1
1325 test-debug-phase: move rev 7: 0 -> 1
1337 test-debug-phase: move rev 7: 0 -> 1
1326 test-debug-phase: move rev 8: 0 -> 1
1338 test-debug-phase: move rev 8: 0 -> 1
1327 test-debug-phase: move rev 9: 0 -> 1
1339 test-debug-phase: move rev 9: 0 -> 1
1328 #endif
1340 #endif
1329
1341
1330 $ hg clone -q Upsilon Pi -r 7
1342 $ hg clone -q Upsilon Pi -r 7
1331 test-debug-phase: new rev 0: x -> 0
1343 test-debug-phase: new rev 0: x -> 0
1332 test-debug-phase: new rev 1: x -> 0
1344 test-debug-phase: new rev 1: x -> 0
1333 test-debug-phase: new rev 2: x -> 0
1345 test-debug-phase: new rev 2: x -> 0
1334 test-debug-phase: new rev 3: x -> 0
1346 test-debug-phase: new rev 3: x -> 0
1335 test-debug-phase: new rev 4: x -> 0
1347 test-debug-phase: new rev 4: x -> 0
1336 $ hgph Upsilon -r 'min(draft())'
1348 $ hgph Upsilon -r 'min(draft())'
1337 o 2 draft a-C - 54acac6f23ab
1349 o 2 draft a-C - 54acac6f23ab
1338 |
1350 |
1339 ~
1351 ~
1340
1352
1341 $ hg -R Upsilon push Pi -r 7
1353 $ hg -R Upsilon push Pi -r 7
1342 pushing to Pi
1354 pushing to Pi
1343 searching for changes
1355 searching for changes
1344 no changes found
1356 no changes found
1345 test-debug-phase: move rev 2: 1 -> 0
1357 test-debug-phase: move rev 2: 1 -> 0
1346 test-debug-phase: move rev 3: 1 -> 0
1358 test-debug-phase: move rev 3: 1 -> 0
1347 test-debug-phase: move rev 7: 1 -> 0
1359 test-debug-phase: move rev 7: 1 -> 0
1348 [1]
1360 [1]
1349 $ hgph Upsilon -r 'min(draft())'
1361 $ hgph Upsilon -r 'min(draft())'
1350 o 8 draft a-F - b740e3e5c05d
1362 o 8 draft a-F - b740e3e5c05d
1351 |
1363 |
1352 ~
1364 ~
1353
1365
1354 $ hg -R Upsilon push Pi -r 8
1366 $ hg -R Upsilon push Pi -r 8
1355 pushing to Pi
1367 pushing to Pi
1356 searching for changes
1368 searching for changes
1357 adding changesets
1369 adding changesets
1358 adding manifests
1370 adding manifests
1359 adding file changes
1371 adding file changes
1360 added 1 changesets with 1 changes to 1 files
1372 added 1 changesets with 1 changes to 1 files
1361 test-debug-phase: new rev 5: x -> 0
1373 test-debug-phase: new rev 5: x -> 0
1362 test-debug-phase: move rev 8: 1 -> 0
1374 test-debug-phase: move rev 8: 1 -> 0
1363
1375
1364 $ hgph Upsilon -r 'min(draft())'
1376 $ hgph Upsilon -r 'min(draft())'
1365 o 9 draft a-G - 3e27b6f1eee1
1377 o 9 draft a-G - 3e27b6f1eee1
1366 |
1378 |
1367 ~
1379 ~