##// END OF EJS Templates
exchange: check actually missing revs for obsolete / unstable revs (issue6372)...
Manuel Jacob -
r45716:c26335fa default
parent child Browse files
Show More
@@ -1,3157 +1,3162 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import weakref
11 import weakref
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 nullrev,
17 nullrev,
18 )
18 )
19 from .thirdparty import attr
19 from .thirdparty import attr
20 from . import (
20 from . import (
21 bookmarks as bookmod,
21 bookmarks as bookmod,
22 bundle2,
22 bundle2,
23 changegroup,
23 changegroup,
24 discovery,
24 discovery,
25 error,
25 error,
26 exchangev2,
26 exchangev2,
27 lock as lockmod,
27 lock as lockmod,
28 logexchange,
28 logexchange,
29 narrowspec,
29 narrowspec,
30 obsolete,
30 obsolete,
31 obsutil,
31 obsutil,
32 phases,
32 phases,
33 pushkey,
33 pushkey,
34 pycompat,
34 pycompat,
35 scmutil,
35 scmutil,
36 sslutil,
36 sslutil,
37 streamclone,
37 streamclone,
38 url as urlmod,
38 url as urlmod,
39 util,
39 util,
40 wireprototypes,
40 wireprototypes,
41 )
41 )
42 from .interfaces import repository
42 from .interfaces import repository
43 from .utils import (
43 from .utils import (
44 hashutil,
44 hashutil,
45 stringutil,
45 stringutil,
46 )
46 )
47
47
48 urlerr = util.urlerr
48 urlerr = util.urlerr
49 urlreq = util.urlreq
49 urlreq = util.urlreq
50
50
51 _NARROWACL_SECTION = b'narrowacl'
51 _NARROWACL_SECTION = b'narrowacl'
52
52
53 # Maps bundle version human names to changegroup versions.
53 # Maps bundle version human names to changegroup versions.
54 _bundlespeccgversions = {
54 _bundlespeccgversions = {
55 b'v1': b'01',
55 b'v1': b'01',
56 b'v2': b'02',
56 b'v2': b'02',
57 b'packed1': b's1',
57 b'packed1': b's1',
58 b'bundle2': b'02', # legacy
58 b'bundle2': b'02', # legacy
59 }
59 }
60
60
61 # Maps bundle version with content opts to choose which part to bundle
61 # Maps bundle version with content opts to choose which part to bundle
62 _bundlespeccontentopts = {
62 _bundlespeccontentopts = {
63 b'v1': {
63 b'v1': {
64 b'changegroup': True,
64 b'changegroup': True,
65 b'cg.version': b'01',
65 b'cg.version': b'01',
66 b'obsolescence': False,
66 b'obsolescence': False,
67 b'phases': False,
67 b'phases': False,
68 b'tagsfnodescache': False,
68 b'tagsfnodescache': False,
69 b'revbranchcache': False,
69 b'revbranchcache': False,
70 },
70 },
71 b'v2': {
71 b'v2': {
72 b'changegroup': True,
72 b'changegroup': True,
73 b'cg.version': b'02',
73 b'cg.version': b'02',
74 b'obsolescence': False,
74 b'obsolescence': False,
75 b'phases': False,
75 b'phases': False,
76 b'tagsfnodescache': True,
76 b'tagsfnodescache': True,
77 b'revbranchcache': True,
77 b'revbranchcache': True,
78 },
78 },
79 b'packed1': {b'cg.version': b's1'},
79 b'packed1': {b'cg.version': b's1'},
80 }
80 }
81 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
81 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
82
82
83 _bundlespecvariants = {
83 _bundlespecvariants = {
84 b"streamv2": {
84 b"streamv2": {
85 b"changegroup": False,
85 b"changegroup": False,
86 b"streamv2": True,
86 b"streamv2": True,
87 b"tagsfnodescache": False,
87 b"tagsfnodescache": False,
88 b"revbranchcache": False,
88 b"revbranchcache": False,
89 }
89 }
90 }
90 }
91
91
92 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
92 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
93 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
93 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
94
94
95
95
96 @attr.s
96 @attr.s
97 class bundlespec(object):
97 class bundlespec(object):
98 compression = attr.ib()
98 compression = attr.ib()
99 wirecompression = attr.ib()
99 wirecompression = attr.ib()
100 version = attr.ib()
100 version = attr.ib()
101 wireversion = attr.ib()
101 wireversion = attr.ib()
102 params = attr.ib()
102 params = attr.ib()
103 contentopts = attr.ib()
103 contentopts = attr.ib()
104
104
105
105
106 def parsebundlespec(repo, spec, strict=True):
106 def parsebundlespec(repo, spec, strict=True):
107 """Parse a bundle string specification into parts.
107 """Parse a bundle string specification into parts.
108
108
109 Bundle specifications denote a well-defined bundle/exchange format.
109 Bundle specifications denote a well-defined bundle/exchange format.
110 The content of a given specification should not change over time in
110 The content of a given specification should not change over time in
111 order to ensure that bundles produced by a newer version of Mercurial are
111 order to ensure that bundles produced by a newer version of Mercurial are
112 readable from an older version.
112 readable from an older version.
113
113
114 The string currently has the form:
114 The string currently has the form:
115
115
116 <compression>-<type>[;<parameter0>[;<parameter1>]]
116 <compression>-<type>[;<parameter0>[;<parameter1>]]
117
117
118 Where <compression> is one of the supported compression formats
118 Where <compression> is one of the supported compression formats
119 and <type> is (currently) a version string. A ";" can follow the type and
119 and <type> is (currently) a version string. A ";" can follow the type and
120 all text afterwards is interpreted as URI encoded, ";" delimited key=value
120 all text afterwards is interpreted as URI encoded, ";" delimited key=value
121 pairs.
121 pairs.
122
122
123 If ``strict`` is True (the default) <compression> is required. Otherwise,
123 If ``strict`` is True (the default) <compression> is required. Otherwise,
124 it is optional.
124 it is optional.
125
125
126 Returns a bundlespec object of (compression, version, parameters).
126 Returns a bundlespec object of (compression, version, parameters).
127 Compression will be ``None`` if not in strict mode and a compression isn't
127 Compression will be ``None`` if not in strict mode and a compression isn't
128 defined.
128 defined.
129
129
130 An ``InvalidBundleSpecification`` is raised when the specification is
130 An ``InvalidBundleSpecification`` is raised when the specification is
131 not syntactically well formed.
131 not syntactically well formed.
132
132
133 An ``UnsupportedBundleSpecification`` is raised when the compression or
133 An ``UnsupportedBundleSpecification`` is raised when the compression or
134 bundle type/version is not recognized.
134 bundle type/version is not recognized.
135
135
136 Note: this function will likely eventually return a more complex data
136 Note: this function will likely eventually return a more complex data
137 structure, including bundle2 part information.
137 structure, including bundle2 part information.
138 """
138 """
139
139
140 def parseparams(s):
140 def parseparams(s):
141 if b';' not in s:
141 if b';' not in s:
142 return s, {}
142 return s, {}
143
143
144 params = {}
144 params = {}
145 version, paramstr = s.split(b';', 1)
145 version, paramstr = s.split(b';', 1)
146
146
147 for p in paramstr.split(b';'):
147 for p in paramstr.split(b';'):
148 if b'=' not in p:
148 if b'=' not in p:
149 raise error.InvalidBundleSpecification(
149 raise error.InvalidBundleSpecification(
150 _(
150 _(
151 b'invalid bundle specification: '
151 b'invalid bundle specification: '
152 b'missing "=" in parameter: %s'
152 b'missing "=" in parameter: %s'
153 )
153 )
154 % p
154 % p
155 )
155 )
156
156
157 key, value = p.split(b'=', 1)
157 key, value = p.split(b'=', 1)
158 key = urlreq.unquote(key)
158 key = urlreq.unquote(key)
159 value = urlreq.unquote(value)
159 value = urlreq.unquote(value)
160 params[key] = value
160 params[key] = value
161
161
162 return version, params
162 return version, params
163
163
164 if strict and b'-' not in spec:
164 if strict and b'-' not in spec:
165 raise error.InvalidBundleSpecification(
165 raise error.InvalidBundleSpecification(
166 _(
166 _(
167 b'invalid bundle specification; '
167 b'invalid bundle specification; '
168 b'must be prefixed with compression: %s'
168 b'must be prefixed with compression: %s'
169 )
169 )
170 % spec
170 % spec
171 )
171 )
172
172
173 if b'-' in spec:
173 if b'-' in spec:
174 compression, version = spec.split(b'-', 1)
174 compression, version = spec.split(b'-', 1)
175
175
176 if compression not in util.compengines.supportedbundlenames:
176 if compression not in util.compengines.supportedbundlenames:
177 raise error.UnsupportedBundleSpecification(
177 raise error.UnsupportedBundleSpecification(
178 _(b'%s compression is not supported') % compression
178 _(b'%s compression is not supported') % compression
179 )
179 )
180
180
181 version, params = parseparams(version)
181 version, params = parseparams(version)
182
182
183 if version not in _bundlespeccgversions:
183 if version not in _bundlespeccgversions:
184 raise error.UnsupportedBundleSpecification(
184 raise error.UnsupportedBundleSpecification(
185 _(b'%s is not a recognized bundle version') % version
185 _(b'%s is not a recognized bundle version') % version
186 )
186 )
187 else:
187 else:
188 # Value could be just the compression or just the version, in which
188 # Value could be just the compression or just the version, in which
189 # case some defaults are assumed (but only when not in strict mode).
189 # case some defaults are assumed (but only when not in strict mode).
190 assert not strict
190 assert not strict
191
191
192 spec, params = parseparams(spec)
192 spec, params = parseparams(spec)
193
193
194 if spec in util.compengines.supportedbundlenames:
194 if spec in util.compengines.supportedbundlenames:
195 compression = spec
195 compression = spec
196 version = b'v1'
196 version = b'v1'
197 # Generaldelta repos require v2.
197 # Generaldelta repos require v2.
198 if b'generaldelta' in repo.requirements:
198 if b'generaldelta' in repo.requirements:
199 version = b'v2'
199 version = b'v2'
200 # Modern compression engines require v2.
200 # Modern compression engines require v2.
201 if compression not in _bundlespecv1compengines:
201 if compression not in _bundlespecv1compengines:
202 version = b'v2'
202 version = b'v2'
203 elif spec in _bundlespeccgversions:
203 elif spec in _bundlespeccgversions:
204 if spec == b'packed1':
204 if spec == b'packed1':
205 compression = b'none'
205 compression = b'none'
206 else:
206 else:
207 compression = b'bzip2'
207 compression = b'bzip2'
208 version = spec
208 version = spec
209 else:
209 else:
210 raise error.UnsupportedBundleSpecification(
210 raise error.UnsupportedBundleSpecification(
211 _(b'%s is not a recognized bundle specification') % spec
211 _(b'%s is not a recognized bundle specification') % spec
212 )
212 )
213
213
214 # Bundle version 1 only supports a known set of compression engines.
214 # Bundle version 1 only supports a known set of compression engines.
215 if version == b'v1' and compression not in _bundlespecv1compengines:
215 if version == b'v1' and compression not in _bundlespecv1compengines:
216 raise error.UnsupportedBundleSpecification(
216 raise error.UnsupportedBundleSpecification(
217 _(b'compression engine %s is not supported on v1 bundles')
217 _(b'compression engine %s is not supported on v1 bundles')
218 % compression
218 % compression
219 )
219 )
220
220
221 # The specification for packed1 can optionally declare the data formats
221 # The specification for packed1 can optionally declare the data formats
222 # required to apply it. If we see this metadata, compare against what the
222 # required to apply it. If we see this metadata, compare against what the
223 # repo supports and error if the bundle isn't compatible.
223 # repo supports and error if the bundle isn't compatible.
224 if version == b'packed1' and b'requirements' in params:
224 if version == b'packed1' and b'requirements' in params:
225 requirements = set(params[b'requirements'].split(b','))
225 requirements = set(params[b'requirements'].split(b','))
226 missingreqs = requirements - repo.supportedformats
226 missingreqs = requirements - repo.supportedformats
227 if missingreqs:
227 if missingreqs:
228 raise error.UnsupportedBundleSpecification(
228 raise error.UnsupportedBundleSpecification(
229 _(b'missing support for repository features: %s')
229 _(b'missing support for repository features: %s')
230 % b', '.join(sorted(missingreqs))
230 % b', '.join(sorted(missingreqs))
231 )
231 )
232
232
233 # Compute contentopts based on the version
233 # Compute contentopts based on the version
234 contentopts = _bundlespeccontentopts.get(version, {}).copy()
234 contentopts = _bundlespeccontentopts.get(version, {}).copy()
235
235
236 # Process the variants
236 # Process the variants
237 if b"stream" in params and params[b"stream"] == b"v2":
237 if b"stream" in params and params[b"stream"] == b"v2":
238 variant = _bundlespecvariants[b"streamv2"]
238 variant = _bundlespecvariants[b"streamv2"]
239 contentopts.update(variant)
239 contentopts.update(variant)
240
240
241 engine = util.compengines.forbundlename(compression)
241 engine = util.compengines.forbundlename(compression)
242 compression, wirecompression = engine.bundletype()
242 compression, wirecompression = engine.bundletype()
243 wireversion = _bundlespeccgversions[version]
243 wireversion = _bundlespeccgversions[version]
244
244
245 return bundlespec(
245 return bundlespec(
246 compression, wirecompression, version, wireversion, params, contentopts
246 compression, wirecompression, version, wireversion, params, contentopts
247 )
247 )
248
248
249
249
250 def readbundle(ui, fh, fname, vfs=None):
250 def readbundle(ui, fh, fname, vfs=None):
251 header = changegroup.readexactly(fh, 4)
251 header = changegroup.readexactly(fh, 4)
252
252
253 alg = None
253 alg = None
254 if not fname:
254 if not fname:
255 fname = b"stream"
255 fname = b"stream"
256 if not header.startswith(b'HG') and header.startswith(b'\0'):
256 if not header.startswith(b'HG') and header.startswith(b'\0'):
257 fh = changegroup.headerlessfixup(fh, header)
257 fh = changegroup.headerlessfixup(fh, header)
258 header = b"HG10"
258 header = b"HG10"
259 alg = b'UN'
259 alg = b'UN'
260 elif vfs:
260 elif vfs:
261 fname = vfs.join(fname)
261 fname = vfs.join(fname)
262
262
263 magic, version = header[0:2], header[2:4]
263 magic, version = header[0:2], header[2:4]
264
264
265 if magic != b'HG':
265 if magic != b'HG':
266 raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
266 raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
267 if version == b'10':
267 if version == b'10':
268 if alg is None:
268 if alg is None:
269 alg = changegroup.readexactly(fh, 2)
269 alg = changegroup.readexactly(fh, 2)
270 return changegroup.cg1unpacker(fh, alg)
270 return changegroup.cg1unpacker(fh, alg)
271 elif version.startswith(b'2'):
271 elif version.startswith(b'2'):
272 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
272 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
273 elif version == b'S1':
273 elif version == b'S1':
274 return streamclone.streamcloneapplier(fh)
274 return streamclone.streamcloneapplier(fh)
275 else:
275 else:
276 raise error.Abort(
276 raise error.Abort(
277 _(b'%s: unknown bundle version %s') % (fname, version)
277 _(b'%s: unknown bundle version %s') % (fname, version)
278 )
278 )
279
279
280
280
281 def getbundlespec(ui, fh):
281 def getbundlespec(ui, fh):
282 """Infer the bundlespec from a bundle file handle.
282 """Infer the bundlespec from a bundle file handle.
283
283
284 The input file handle is seeked and the original seek position is not
284 The input file handle is seeked and the original seek position is not
285 restored.
285 restored.
286 """
286 """
287
287
288 def speccompression(alg):
288 def speccompression(alg):
289 try:
289 try:
290 return util.compengines.forbundletype(alg).bundletype()[0]
290 return util.compengines.forbundletype(alg).bundletype()[0]
291 except KeyError:
291 except KeyError:
292 return None
292 return None
293
293
294 b = readbundle(ui, fh, None)
294 b = readbundle(ui, fh, None)
295 if isinstance(b, changegroup.cg1unpacker):
295 if isinstance(b, changegroup.cg1unpacker):
296 alg = b._type
296 alg = b._type
297 if alg == b'_truncatedBZ':
297 if alg == b'_truncatedBZ':
298 alg = b'BZ'
298 alg = b'BZ'
299 comp = speccompression(alg)
299 comp = speccompression(alg)
300 if not comp:
300 if not comp:
301 raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
301 raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
302 return b'%s-v1' % comp
302 return b'%s-v1' % comp
303 elif isinstance(b, bundle2.unbundle20):
303 elif isinstance(b, bundle2.unbundle20):
304 if b'Compression' in b.params:
304 if b'Compression' in b.params:
305 comp = speccompression(b.params[b'Compression'])
305 comp = speccompression(b.params[b'Compression'])
306 if not comp:
306 if not comp:
307 raise error.Abort(
307 raise error.Abort(
308 _(b'unknown compression algorithm: %s') % comp
308 _(b'unknown compression algorithm: %s') % comp
309 )
309 )
310 else:
310 else:
311 comp = b'none'
311 comp = b'none'
312
312
313 version = None
313 version = None
314 for part in b.iterparts():
314 for part in b.iterparts():
315 if part.type == b'changegroup':
315 if part.type == b'changegroup':
316 version = part.params[b'version']
316 version = part.params[b'version']
317 if version in (b'01', b'02'):
317 if version in (b'01', b'02'):
318 version = b'v2'
318 version = b'v2'
319 else:
319 else:
320 raise error.Abort(
320 raise error.Abort(
321 _(
321 _(
322 b'changegroup version %s does not have '
322 b'changegroup version %s does not have '
323 b'a known bundlespec'
323 b'a known bundlespec'
324 )
324 )
325 % version,
325 % version,
326 hint=_(b'try upgrading your Mercurial client'),
326 hint=_(b'try upgrading your Mercurial client'),
327 )
327 )
328 elif part.type == b'stream2' and version is None:
328 elif part.type == b'stream2' and version is None:
329 # A stream2 part requires to be part of a v2 bundle
329 # A stream2 part requires to be part of a v2 bundle
330 requirements = urlreq.unquote(part.params[b'requirements'])
330 requirements = urlreq.unquote(part.params[b'requirements'])
331 splitted = requirements.split()
331 splitted = requirements.split()
332 params = bundle2._formatrequirementsparams(splitted)
332 params = bundle2._formatrequirementsparams(splitted)
333 return b'none-v2;stream=v2;%s' % params
333 return b'none-v2;stream=v2;%s' % params
334
334
335 if not version:
335 if not version:
336 raise error.Abort(
336 raise error.Abort(
337 _(b'could not identify changegroup version in bundle')
337 _(b'could not identify changegroup version in bundle')
338 )
338 )
339
339
340 return b'%s-%s' % (comp, version)
340 return b'%s-%s' % (comp, version)
341 elif isinstance(b, streamclone.streamcloneapplier):
341 elif isinstance(b, streamclone.streamcloneapplier):
342 requirements = streamclone.readbundle1header(fh)[2]
342 requirements = streamclone.readbundle1header(fh)[2]
343 formatted = bundle2._formatrequirementsparams(requirements)
343 formatted = bundle2._formatrequirementsparams(requirements)
344 return b'none-packed1;%s' % formatted
344 return b'none-packed1;%s' % formatted
345 else:
345 else:
346 raise error.Abort(_(b'unknown bundle type: %s') % b)
346 raise error.Abort(_(b'unknown bundle type: %s') % b)
347
347
348
348
349 def _computeoutgoing(repo, heads, common):
349 def _computeoutgoing(repo, heads, common):
350 """Computes which revs are outgoing given a set of common
350 """Computes which revs are outgoing given a set of common
351 and a set of heads.
351 and a set of heads.
352
352
353 This is a separate function so extensions can have access to
353 This is a separate function so extensions can have access to
354 the logic.
354 the logic.
355
355
356 Returns a discovery.outgoing object.
356 Returns a discovery.outgoing object.
357 """
357 """
358 cl = repo.changelog
358 cl = repo.changelog
359 if common:
359 if common:
360 hasnode = cl.hasnode
360 hasnode = cl.hasnode
361 common = [n for n in common if hasnode(n)]
361 common = [n for n in common if hasnode(n)]
362 else:
362 else:
363 common = [nullid]
363 common = [nullid]
364 if not heads:
364 if not heads:
365 heads = cl.heads()
365 heads = cl.heads()
366 return discovery.outgoing(repo, common, heads)
366 return discovery.outgoing(repo, common, heads)
367
367
368
368
369 def _checkpublish(pushop):
369 def _checkpublish(pushop):
370 repo = pushop.repo
370 repo = pushop.repo
371 ui = repo.ui
371 ui = repo.ui
372 behavior = ui.config(b'experimental', b'auto-publish')
372 behavior = ui.config(b'experimental', b'auto-publish')
373 if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
373 if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
374 return
374 return
375 remotephases = listkeys(pushop.remote, b'phases')
375 remotephases = listkeys(pushop.remote, b'phases')
376 if not remotephases.get(b'publishing', False):
376 if not remotephases.get(b'publishing', False):
377 return
377 return
378
378
379 if pushop.revs is None:
379 if pushop.revs is None:
380 published = repo.filtered(b'served').revs(b'not public()')
380 published = repo.filtered(b'served').revs(b'not public()')
381 else:
381 else:
382 published = repo.revs(b'::%ln - public()', pushop.revs)
382 published = repo.revs(b'::%ln - public()', pushop.revs)
383 if published:
383 if published:
384 if behavior == b'warn':
384 if behavior == b'warn':
385 ui.warn(
385 ui.warn(
386 _(b'%i changesets about to be published\n') % len(published)
386 _(b'%i changesets about to be published\n') % len(published)
387 )
387 )
388 elif behavior == b'confirm':
388 elif behavior == b'confirm':
389 if ui.promptchoice(
389 if ui.promptchoice(
390 _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
390 _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
391 % len(published)
391 % len(published)
392 ):
392 ):
393 raise error.Abort(_(b'user quit'))
393 raise error.Abort(_(b'user quit'))
394 elif behavior == b'abort':
394 elif behavior == b'abort':
395 msg = _(b'push would publish %i changesets') % len(published)
395 msg = _(b'push would publish %i changesets') % len(published)
396 hint = _(
396 hint = _(
397 b"use --publish or adjust 'experimental.auto-publish'"
397 b"use --publish or adjust 'experimental.auto-publish'"
398 b" config"
398 b" config"
399 )
399 )
400 raise error.Abort(msg, hint=hint)
400 raise error.Abort(msg, hint=hint)
401
401
402
402
403 def _forcebundle1(op):
403 def _forcebundle1(op):
404 """return true if a pull/push must use bundle1
404 """return true if a pull/push must use bundle1
405
405
406 This function is used to allow testing of the older bundle version"""
406 This function is used to allow testing of the older bundle version"""
407 ui = op.repo.ui
407 ui = op.repo.ui
408 # The goal is this config is to allow developer to choose the bundle
408 # The goal is this config is to allow developer to choose the bundle
409 # version used during exchanged. This is especially handy during test.
409 # version used during exchanged. This is especially handy during test.
410 # Value is a list of bundle version to be picked from, highest version
410 # Value is a list of bundle version to be picked from, highest version
411 # should be used.
411 # should be used.
412 #
412 #
413 # developer config: devel.legacy.exchange
413 # developer config: devel.legacy.exchange
414 exchange = ui.configlist(b'devel', b'legacy.exchange')
414 exchange = ui.configlist(b'devel', b'legacy.exchange')
415 forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
415 forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
416 return forcebundle1 or not op.remote.capable(b'bundle2')
416 return forcebundle1 or not op.remote.capable(b'bundle2')
417
417
418
418
419 class pushoperation(object):
419 class pushoperation(object):
420 """A object that represent a single push operation
420 """A object that represent a single push operation
421
421
422 Its purpose is to carry push related state and very common operations.
422 Its purpose is to carry push related state and very common operations.
423
423
424 A new pushoperation should be created at the beginning of each push and
424 A new pushoperation should be created at the beginning of each push and
425 discarded afterward.
425 discarded afterward.
426 """
426 """
427
427
428 def __init__(
428 def __init__(
429 self,
429 self,
430 repo,
430 repo,
431 remote,
431 remote,
432 force=False,
432 force=False,
433 revs=None,
433 revs=None,
434 newbranch=False,
434 newbranch=False,
435 bookmarks=(),
435 bookmarks=(),
436 publish=False,
436 publish=False,
437 pushvars=None,
437 pushvars=None,
438 ):
438 ):
439 # repo we push from
439 # repo we push from
440 self.repo = repo
440 self.repo = repo
441 self.ui = repo.ui
441 self.ui = repo.ui
442 # repo we push to
442 # repo we push to
443 self.remote = remote
443 self.remote = remote
444 # force option provided
444 # force option provided
445 self.force = force
445 self.force = force
446 # revs to be pushed (None is "all")
446 # revs to be pushed (None is "all")
447 self.revs = revs
447 self.revs = revs
448 # bookmark explicitly pushed
448 # bookmark explicitly pushed
449 self.bookmarks = bookmarks
449 self.bookmarks = bookmarks
450 # allow push of new branch
450 # allow push of new branch
451 self.newbranch = newbranch
451 self.newbranch = newbranch
452 # step already performed
452 # step already performed
453 # (used to check what steps have been already performed through bundle2)
453 # (used to check what steps have been already performed through bundle2)
454 self.stepsdone = set()
454 self.stepsdone = set()
455 # Integer version of the changegroup push result
455 # Integer version of the changegroup push result
456 # - None means nothing to push
456 # - None means nothing to push
457 # - 0 means HTTP error
457 # - 0 means HTTP error
458 # - 1 means we pushed and remote head count is unchanged *or*
458 # - 1 means we pushed and remote head count is unchanged *or*
459 # we have outgoing changesets but refused to push
459 # we have outgoing changesets but refused to push
460 # - other values as described by addchangegroup()
460 # - other values as described by addchangegroup()
461 self.cgresult = None
461 self.cgresult = None
462 # Boolean value for the bookmark push
462 # Boolean value for the bookmark push
463 self.bkresult = None
463 self.bkresult = None
464 # discover.outgoing object (contains common and outgoing data)
464 # discover.outgoing object (contains common and outgoing data)
465 self.outgoing = None
465 self.outgoing = None
466 # all remote topological heads before the push
466 # all remote topological heads before the push
467 self.remoteheads = None
467 self.remoteheads = None
468 # Details of the remote branch pre and post push
468 # Details of the remote branch pre and post push
469 #
469 #
470 # mapping: {'branch': ([remoteheads],
470 # mapping: {'branch': ([remoteheads],
471 # [newheads],
471 # [newheads],
472 # [unsyncedheads],
472 # [unsyncedheads],
473 # [discardedheads])}
473 # [discardedheads])}
474 # - branch: the branch name
474 # - branch: the branch name
475 # - remoteheads: the list of remote heads known locally
475 # - remoteheads: the list of remote heads known locally
476 # None if the branch is new
476 # None if the branch is new
477 # - newheads: the new remote heads (known locally) with outgoing pushed
477 # - newheads: the new remote heads (known locally) with outgoing pushed
478 # - unsyncedheads: the list of remote heads unknown locally.
478 # - unsyncedheads: the list of remote heads unknown locally.
479 # - discardedheads: the list of remote heads made obsolete by the push
479 # - discardedheads: the list of remote heads made obsolete by the push
480 self.pushbranchmap = None
480 self.pushbranchmap = None
481 # testable as a boolean indicating if any nodes are missing locally.
481 # testable as a boolean indicating if any nodes are missing locally.
482 self.incoming = None
482 self.incoming = None
483 # summary of the remote phase situation
483 # summary of the remote phase situation
484 self.remotephases = None
484 self.remotephases = None
485 # phases changes that must be pushed along side the changesets
485 # phases changes that must be pushed along side the changesets
486 self.outdatedphases = None
486 self.outdatedphases = None
487 # phases changes that must be pushed if changeset push fails
487 # phases changes that must be pushed if changeset push fails
488 self.fallbackoutdatedphases = None
488 self.fallbackoutdatedphases = None
489 # outgoing obsmarkers
489 # outgoing obsmarkers
490 self.outobsmarkers = set()
490 self.outobsmarkers = set()
491 # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
491 # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
492 self.outbookmarks = []
492 self.outbookmarks = []
493 # transaction manager
493 # transaction manager
494 self.trmanager = None
494 self.trmanager = None
495 # map { pushkey partid -> callback handling failure}
495 # map { pushkey partid -> callback handling failure}
496 # used to handle exception from mandatory pushkey part failure
496 # used to handle exception from mandatory pushkey part failure
497 self.pkfailcb = {}
497 self.pkfailcb = {}
498 # an iterable of pushvars or None
498 # an iterable of pushvars or None
499 self.pushvars = pushvars
499 self.pushvars = pushvars
500 # publish pushed changesets
500 # publish pushed changesets
501 self.publish = publish
501 self.publish = publish
502
502
503 @util.propertycache
503 @util.propertycache
504 def futureheads(self):
504 def futureheads(self):
505 """future remote heads if the changeset push succeeds"""
505 """future remote heads if the changeset push succeeds"""
506 return self.outgoing.ancestorsof
506 return self.outgoing.ancestorsof
507
507
508 @util.propertycache
508 @util.propertycache
509 def fallbackheads(self):
509 def fallbackheads(self):
510 """future remote heads if the changeset push fails"""
510 """future remote heads if the changeset push fails"""
511 if self.revs is None:
511 if self.revs is None:
512 # not target to push, all common are relevant
512 # not target to push, all common are relevant
513 return self.outgoing.commonheads
513 return self.outgoing.commonheads
514 unfi = self.repo.unfiltered()
514 unfi = self.repo.unfiltered()
515 # I want cheads = heads(::ancestorsof and ::commonheads)
515 # I want cheads = heads(::ancestorsof and ::commonheads)
516 # (ancestorsof is revs with secret changeset filtered out)
516 # (ancestorsof is revs with secret changeset filtered out)
517 #
517 #
518 # This can be expressed as:
518 # This can be expressed as:
519 # cheads = ( (ancestorsof and ::commonheads)
519 # cheads = ( (ancestorsof and ::commonheads)
520 # + (commonheads and ::ancestorsof))"
520 # + (commonheads and ::ancestorsof))"
521 # )
521 # )
522 #
522 #
523 # while trying to push we already computed the following:
523 # while trying to push we already computed the following:
524 # common = (::commonheads)
524 # common = (::commonheads)
525 # missing = ((commonheads::ancestorsof) - commonheads)
525 # missing = ((commonheads::ancestorsof) - commonheads)
526 #
526 #
527 # We can pick:
527 # We can pick:
528 # * ancestorsof part of common (::commonheads)
528 # * ancestorsof part of common (::commonheads)
529 common = self.outgoing.common
529 common = self.outgoing.common
530 rev = self.repo.changelog.index.rev
530 rev = self.repo.changelog.index.rev
531 cheads = [node for node in self.revs if rev(node) in common]
531 cheads = [node for node in self.revs if rev(node) in common]
532 # and
532 # and
533 # * commonheads parents on missing
533 # * commonheads parents on missing
534 revset = unfi.set(
534 revset = unfi.set(
535 b'%ln and parents(roots(%ln))',
535 b'%ln and parents(roots(%ln))',
536 self.outgoing.commonheads,
536 self.outgoing.commonheads,
537 self.outgoing.missing,
537 self.outgoing.missing,
538 )
538 )
539 cheads.extend(c.node() for c in revset)
539 cheads.extend(c.node() for c in revset)
540 return cheads
540 return cheads
541
541
542 @property
542 @property
543 def commonheads(self):
543 def commonheads(self):
544 """set of all common heads after changeset bundle push"""
544 """set of all common heads after changeset bundle push"""
545 if self.cgresult:
545 if self.cgresult:
546 return self.futureheads
546 return self.futureheads
547 else:
547 else:
548 return self.fallbackheads
548 return self.fallbackheads
549
549
550
550
# Messages used when pushing a bookmark: for each action the first entry is
# the success message, the second the failure message.
bookmsgmap = {
    b'update': (
        _(b"updating bookmark %s\n"),
        _(b'updating bookmark %s failed!\n'),
    ),
    b'export': (
        _(b"exporting bookmark %s\n"),
        _(b'exporting bookmark %s failed!\n'),
    ),
    b'delete': (
        _(b"deleting remote bookmark %s\n"),
        _(b'deleting remote bookmark %s failed!\n'),
    ),
}
566
566
567
567
def push(
    repo,
    remote,
    force=False,
    revs=None,
    newbranch=False,
    bookmarks=(),
    publish=False,
    opargs=None,
):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote. Return an integer:
    - None means nothing to push
    - 0 means HTTP error
    - 1 means we pushed and remote head count is unchanged *or*
      we have outgoing changesets but refused to push
    - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(
        repo,
        remote,
        force,
        revs,
        newbranch,
        bookmarks,
        publish,
        **pycompat.strkwargs(opargs)
    )
    localpeer = pushop.remote.local()
    if localpeer:
        # pushing into a local repository: every requirement of the source
        # must be understood by the destination
        unsupported = set(pushop.repo.requirements) - localpeer.supported
        if unsupported:
            msg = _(
                b"required features are not"
                b" supported in the destination:"
                b" %s"
            ) % (b', '.join(sorted(unsupported)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_(b"destination does not support push"))

    if not pushop.remote.capable(b'unbundle'):
        raise error.Abort(
            _(
                b'cannot push: destination does not support the '
                b'unbundle wire protocol command'
            )
        )

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks
        # requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
        if (
            (not _forcebundle1(pushop))
            and maypushback
            and not bookmod.bookmarksinstore(repo)
        ):
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(
            pushop.repo, b'push-response', pushop.remote.url()
        )
    except error.LockUnavailable as err:
        # The source repo cannot be locked: do not abort the push, just
        # disable the local phase synchronisation.
        msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
            err
        )
        pushop.ui.debug(msg)

    with wlock or util.nullcontextmanager():
        with lock or util.nullcontextmanager():
            with pushop.trmanager or util.nullcontextmanager():
                pushop.repo.checkpush(pushop)
                _checkpublish(pushop)
                _pushdiscovery(pushop)
                if not pushop.force:
                    _checksubrepostate(pushop)
                if not _forcebundle1(pushop):
                    _pushbundle2(pushop)
                # bundle1 fallbacks (each step checks pushop.stepsdone)
                _pushchangeset(pushop)
                _pushsyncphase(pushop)
                _pushobsolete(pushop)
                _pushbookmark(pushop)

    if repo.ui.configbool(b'experimental', b'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pushop
665
665
666
666
667 # list of steps to perform discovery before push
667 # list of steps to perform discovery before push
668 pushdiscoveryorder = []
668 pushdiscoveryorder = []
669
669
670 # Mapping between step name and function
670 # Mapping between step name and function
671 #
671 #
672 # This exists to help extensions wrap steps if necessary
672 # This exists to help extensions wrap steps if necessary
673 pushdiscoverymapping = {}
673 pushdiscoverymapping = {}
674
674
675
675
676 def pushdiscovery(stepname):
676 def pushdiscovery(stepname):
677 """decorator for function performing discovery before push
677 """decorator for function performing discovery before push
678
678
679 The function is added to the step -> function mapping and appended to the
679 The function is added to the step -> function mapping and appended to the
680 list of steps. Beware that decorated function will be added in order (this
680 list of steps. Beware that decorated function will be added in order (this
681 may matter).
681 may matter).
682
682
683 You can only use this decorator for a new step, if you want to wrap a step
683 You can only use this decorator for a new step, if you want to wrap a step
684 from an extension, change the pushdiscovery dictionary directly."""
684 from an extension, change the pushdiscovery dictionary directly."""
685
685
686 def dec(func):
686 def dec(func):
687 assert stepname not in pushdiscoverymapping
687 assert stepname not in pushdiscoverymapping
688 pushdiscoverymapping[stepname] = func
688 pushdiscoverymapping[stepname] = func
689 pushdiscoveryorder.append(stepname)
689 pushdiscoveryorder.append(stepname)
690 return func
690 return func
691
691
692 return dec
692 return dec
693
693
694
694
695 def _pushdiscovery(pushop):
695 def _pushdiscovery(pushop):
696 """Run all discovery steps"""
696 """Run all discovery steps"""
697 for stepname in pushdiscoveryorder:
697 for stepname in pushdiscoveryorder:
698 step = pushdiscoverymapping[stepname]
698 step = pushdiscoverymapping[stepname]
699 step(pushop)
699 step(pushop)
700
700
701
701
def _checksubrepostate(pushop):
    """Ensure all outgoing referenced subrepo revisions are present locally"""
    repo = pushop.repo
    for node in pushop.outgoing.missing:
        ctx = repo[node]
        # only changesets that both declare subrepos and touch the subrepo
        # state file need their referenced revisions verified
        if b'.hgsub' in ctx.manifest() and b'.hgsubstate' in ctx.files():
            for subpath in sorted(ctx.substate):
                ctx.sub(subpath).verify(onpush=True)
711
711
712
712
@pushdiscovery(b'changeset')
def _pushdiscoverychangeset(pushop):
    """discover the changeset that need to be pushed"""
    if pushop.revs:
        commoninc = discovery.findcommonincoming(
            pushop.repo,
            pushop.remote,
            force=pushop.force,
            ancestorsof=pushop.revs,
        )
    else:
        commoninc = discovery.findcommonincoming(
            pushop.repo, pushop.remote, force=pushop.force
        )
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(
        pushop.repo,
        pushop.remote,
        onlyheads=pushop.revs,
        commoninc=commoninc,
        force=pushop.force,
    )
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
738
738
739
739
@pushdiscovery(b'phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)"""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = listkeys(pushop.remote, b'phases')

    if (
        pushop.ui.configbool(b'ui', b'_usedassubrepo')
        and remotephases  # server supports phases
        and not pushop.outgoing.missing  # no changesets to be pushed
        and remotephases.get(b'publishing', False)
    ):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(
        pushop.repo, pushop.fallbackheads, remotephases
    )
    droots = pushop.remotephases.draftroots

    # restrict to public heads unless the remote is non-publishing
    publiccond = b''
    if not pushop.remotephases.publishing:
        publiccond = b' and public()'
    headsquery = b'heads((%%ln::%%ln) %s)' % publiccond
    # Get the list of all revs draft on remote by public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(headsquery, droots, pushop.fallbackheads))
    if not pushop.remotephases.publishing and pushop.publish:
        future = list(
            unfi.set(
                b'%ln and (not public() or %ln::)', pushop.futureheads, droots
            )
        )
    elif not outgoing.missing:
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(
            unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
        )
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(headsquery, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
801
801
802
802
@pushdiscovery(b'obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """select the obsolescence markers relevant to the pushed changesets"""
    repo = pushop.repo
    if not obsolete.isenabled(repo, obsolete.exchangeopt):
        return

    if not repo.obsstore:
        return

    if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
        return

    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
819
819
820
820
@pushdiscovery(b'bookmarks')
def _pushdiscoverybookmarks(pushop):
    """compare local and remote bookmarks and decide what to push"""
    ui = pushop.ui
    repo = pushop.repo.unfiltered()
    remote = pushop.remote
    ui.debug(b"checking for updated bookmarks\n")
    # restrict bookmark moves to ancestors of the pushed revs, when given
    ancestors = ()
    if pushop.revs:
        revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)

    remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, b'bookmarks'))

    # bookmarks the user explicitly asked to push
    explicit = {
        repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
    }

    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
840
840
841
841
def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """take decision on bookmarks to push to the remote repo

    Exists to help extensions alter this behavior.
    """
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo

    # bookmarks that simply advanced locally
    for name, srcid, dstid in advsrc:
        explicit.discard(name)
        if not pushed or repo[srcid].rev() in pushed:
            pushop.outbookmarks.append((name, dstid, srcid))
    # search added bookmark
    for name, srcid, dstid in addsrc:
        explicit.discard(name)
        if bookmod.isdivergent(name):
            pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % name)
            pushop.bkresult = 2
        else:
            pushop.outbookmarks.append((name, b'', srcid))
    # search for overwritten bookmark
    for name, srcid, dstid in list(advdst) + list(diverge) + list(differ):
        explicit.discard(name)
        pushop.outbookmarks.append((name, dstid, srcid))
    # search for bookmark to delete
    for name, srcid, dstid in adddst:
        explicit.discard(name)
        # treat as "deleted locally"
        pushop.outbookmarks.append((name, dstid, b''))
    # identical bookmarks shouldn't get reported
    for name, srcid, dstid in same:
        explicit.discard(name)

    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(
            _(
                b'bookmark %s does not exist on the local '
                b'or remote repository!\n'
            )
            % explicit[0]
        )
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
894
894
895
895
def _pushcheckoutgoing(pushop):
    """vet the outgoing changesets; return True when there is work to push

    Aborts when the outgoing set contains obsolete or unstable changesets
    (unless --force), listing every offending changeset (issue6372).
    """
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # collect every troubled changeset so all of them are reported
            obsoletes = []
            unstables = []
            for node in outgoing.missing:
                ctx = unfi[node]
                if ctx.obsolete():
                    obsoletes.append(ctx)
                elif ctx.isunstable():
                    unstables.append(ctx)
            if obsoletes or unstables:
                msg = b""
                if obsoletes:
                    msg += _(b"push includes obsolete changesets:\n")
                    msg += b"\n".join(b'  %s' % ctx for ctx in obsoletes)
                if unstables:
                    if msg:
                        msg += b"\n"
                    msg += _(b"push includes unstable changesets:\n")
                    msg += b"\n".join(
                        b'  %s (%s)'
                        % (
                            ctx,
                            b", ".join(_(ins) for ins in ctx.instabilities()),
                        )
                        for ctx in unstables
                    )
                raise error.Abort(msg)

    discovery.checkheads(pushop)
    return True
932
937
933
938
# Ordered list of part-generator step names for an outgoing bundle2;
# order matters.
b2partsgenorder = []

# Step name -> implementation function.
#
# Kept public so extensions can wrap individual steps if necessary.
b2partsgenmapping = {}


def b2partsgenerator(stepname, idx=None):
    """decorator registering a function generating a bundle2 part

    The decorated function is recorded in the name -> function mapping and
    its name inserted into the ordered step list (appended when ``idx`` is
    None, at position ``idx`` otherwise), so registration order matters.

    Only use this decorator for brand new steps; to wrap a step from an
    extension, mutate the b2partsgenmapping dictionary directly."""

    def register(func):
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = func
        if idx is None:
            b2partsgenorder.append(stepname)
        else:
            b2partsgenorder.insert(idx, stepname)
        return func

    return register
963
968
964
969
def _pushb2ctxcheckheads(pushop, bundler):
    """Generate race condition checking parts

    Exists as an independent function to aid extensions
    """
    # * 'force' do not check for push race,
    # * if we don't push anything, there are nothing to check.
    if pushop.force or not pushop.outgoing.ancestorsof:
        return
    allowunrelated = b'related' in bundler.capabilities.get(b'checkheads', ())
    emptyremote = pushop.pushbranchmap is None
    if not allowunrelated or emptyremote:
        bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
    else:
        affected = set()
        for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
            remoteheads, newheads, unsyncedheads, discardedheads = heads
            if remoteheads is None:
                continue
            remote = set(remoteheads)
            affected |= set(discardedheads) & remote
            affected |= remote - set(newheads)
        if affected:
            bundler.newpart(
                b'check:updated-heads', data=iter(sorted(affected))
            )
990
995
991
996
992 def _pushing(pushop):
997 def _pushing(pushop):
993 """return True if we are pushing anything"""
998 """return True if we are pushing anything"""
994 return bool(
999 return bool(
995 pushop.outgoing.missing
1000 pushop.outgoing.missing
996 or pushop.outdatedphases
1001 or pushop.outdatedphases
997 or pushop.outobsmarkers
1002 or pushop.outobsmarkers
998 or pushop.outbookmarks
1003 or pushop.outbookmarks
999 )
1004 )
1000
1005
1001
1006
@b2partsgenerator(b'check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasbookmarkcheck = b'bookmarks' in b2caps
    if not (pushop.outbookmarks and hasbookmarkcheck):
        return
    # record the expected previous value of every moved bookmark so the
    # server can detect races
    data = [(book, old) for book, old, new in pushop.outbookmarks]
    checkdata = bookmod.binaryencode(data)
    bundler.newpart(b'check:bookmarks', data=checkdata)
1016
1021
1017
1022
@b2partsgenerator(b'check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = b'heads' in b2caps.get(b'phases', ())
    if pushop.remotephases is None or not hasphaseheads:
        return
    # check that the remote phase has not changed
    checks = {p: [] for p in phases.allphases}
    checks[phases.public].extend(pushop.remotephases.publicheads)
    checks[phases.draft].extend(pushop.remotephases.draftroots)
    if any(pycompat.itervalues(checks)):
        for phase in checks:
            checks[phase].sort()
        checkdata = phases.binaryencode(checks)
        bundler.newpart(b'check:phases', data=checkdata)
1035
1040
1036
1041
@b2partsgenerator(b'changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    Returns a reply handler extracting that result from the server reply.
    """
    if b'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    # negotiate the changegroup version: keep the highest version both
    # sides support, falling back to '01' when the remote advertises none
    b2caps = bundle2.bundle2caps(pushop.remote)
    version = b'01'
    cgversions = b2caps.get(b'changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [
            v
            for v in cgversions
            if v in changegroup.supportedoutgoingversions(pushop.repo)
        ]
        if not cgversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(
        pushop.repo, pushop.outgoing, version, b'push'
    )
    cgpart = bundler.newpart(b'changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam(b'version', version)
    # advertise repo features the receiver must understand to apply the data
    if b'treemanifest' in pushop.repo.requirements:
        cgpart.addparam(b'treemanifest', b'1')
    if b'exp-sidedata-flag' in pushop.repo.requirements:
        cgpart.addparam(b'exp-sidedata', b'1')

    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies[b'changegroup']) == 1
        pushop.cgresult = cgreplies[b'changegroup'][0][b'return']

    return handlereply
1083
1088
1084
1089
@b2partsgenerator(b'phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if b'phases' in pushop.stepsdone:
        return
    remotecaps = bundle2.bundle2caps(pushop.remote)
    forcelegacy = b'phases' in pushop.repo.ui.configlist(
        b'devel', b'legacy.exchange'
    )
    supportsheads = b'heads' in remotecaps.get(b'phases', ())
    supportspushkey = b'pushkey' in remotecaps

    # prefer the binary phase-heads part unless the devel knob forces the
    # legacy pushkey exchange
    if supportsheads and not forcelegacy:
        return _pushb2phaseheads(pushop, bundler)
    if supportspushkey:
        return _pushb2phasespushkey(pushop, bundler)
1101
1106
1102
1107
def _pushb2phaseheads(pushop, bundler):
    """push phase information through a bundle2 - binary part

    Emits a ``phase-heads`` part listing the heads that become public on
    the remote.  Emits nothing when there is no outdated phase to push.
    """
    pushop.stepsdone.add(b'phases')
    if pushop.outdatedphases:
        updates = {p: [] for p in phases.allphases}
        # use the named constant instead of a bare 0, matching how the
        # check:phases sibling indexes its dict (phases.public == 0)
        updates[phases.public].extend(
            h.node() for h in pushop.outdatedphases
        )
        phasedata = phases.binaryencode(updates)
        bundler.newpart(b'phase-heads', data=phasedata)
1111
1116
1112
1117
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part

    One ``pushkey`` part is emitted per head that must become public on
    the remote.  Returns a reply handler reporting per-head results.
    """
    pushop.stepsdone.add(b'phases')
    part2node = []  # (partid, node) pairs to map replies/failures back

    def handlefailure(pushop, exc):
        # the remote rejected one of our pushkey parts; find which head
        # the failing part was about and abort with its name
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_(b'updating %s to public failed') % node)

    enc = pushkey.encode
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart(b'pushkey')
        part.addparam(b'namespace', enc(b'phases'))
        part.addparam(b'key', enc(newremotehead.hex()))
        part.addparam(b'old', enc(b'%d' % phases.draft))
        part.addparam(b'new', enc(b'%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # inspect the server reply for each part we generated and warn
        # about any head whose phase update was ignored or refused
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep[b'pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _(b'server ignored update of %s to public!\n') % node
            elif not int(results[0][b'return']):
                msg = _(b'updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)

    return handlereply
1148
1153
1149
1154
@b2partsgenerator(b'obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """attach outgoing obsolescence markers as a bundle2 part"""
    if b'obsmarkers' in pushop.stepsdone:
        return
    # only send markers when the remote understands at least one of the
    # obsmarker wire formats we can produce
    supported = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(supported) is None:
        return
    pushop.stepsdone.add(b'obsmarkers')
    if not pushop.outobsmarkers:
        return
    bundle2.buildobsmarkerspart(
        bundler, obsutil.sortedmarkers(pushop.outobsmarkers)
    )
1161
1166
1162
1167
@b2partsgenerator(b'bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2"""
    if b'bookmarks' in pushop.stepsdone:
        return
    remotecaps = bundle2.bundle2caps(pushop.remote)
    forcelegacy = b'bookmarks' in pushop.repo.ui.configlist(
        b'devel', b'legacy.exchange'
    )

    # prefer the binary bookmarks part unless the devel knob forces the
    # legacy pushkey exchange
    if not forcelegacy and b'bookmarks' in remotecaps:
        return _pushb2bookmarkspart(pushop, bundler)
    if b'pushkey' in remotecaps:
        return _pushb2bookmarkspushkey(pushop, bundler)
1177
1182
1178
1183
1179 def _bmaction(old, new):
1184 def _bmaction(old, new):
1180 """small utility for bookmark pushing"""
1185 """small utility for bookmark pushing"""
1181 if not old:
1186 if not old:
1182 return b'export'
1187 return b'export'
1183 elif not new:
1188 elif not new:
1184 return b'delete'
1189 return b'delete'
1185 return b'update'
1190 return b'update'
1186
1191
1187
1192
def _abortonsecretctx(pushop, node, b):
    """abort if a given bookmark points to a secret changeset"""
    if not node:
        return
    if pushop.repo[node].phase() == phases.secret:
        msg = _(b'cannot push bookmark %s as it points to a secret changeset')
        raise error.Abort(msg % b)
1194
1199
1195
1200
def _pushb2bookmarkspart(pushop, bundler):
    """push outgoing bookmarks through a binary bundle2 ``bookmarks`` part"""
    pushop.stepsdone.add(b'bookmarks')
    if not pushop.outbookmarks:
        return

    actions = []
    payload = []
    for name, oldnode, newnode in pushop.outbookmarks:
        # refuse to expose a secret changeset through a bookmark
        _abortonsecretctx(pushop, newnode, name)
        payload.append((name, newnode))
        actions.append((name, _bmaction(oldnode, newnode)))
    bundler.newpart(b'bookmarks', data=bookmod.binaryencode(payload))

    def handlereply(op):
        # only reached on success: report each pushed bookmark
        for name, action in actions:
            pushop.ui.status(bookmsgmap[action][0] % name)

    return handlereply
1217
1222
1218
1223
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmarks through bundle2 ``pushkey`` parts (legacy fallback)

    One part is emitted per outgoing bookmark.  Returns a reply handler
    reporting the outcome of each bookmark update.
    """
    pushop.stepsdone.add(b'bookmarks')
    part2book = []  # (partid, bookmark, action) triples for reply mapping
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # map a failed part id back to its bookmark and abort with a
        # bookmark-specific message
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        # refuse to expose a secret changeset through a bookmark
        _abortonsecretctx(pushop, new, book)
        part = bundler.newpart(b'pushkey')
        part.addparam(b'namespace', enc(b'bookmarks'))
        part.addparam(b'key', enc(book))
        part.addparam(b'old', enc(hex(old)))
        part.addparam(b'new', enc(hex(new)))
        action = b'update'
        if not old:
            action = b'export'
        elif not new:
            action = b'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep[b'pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0][b'return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        # bkresult may have been preset by discovery; mark bookmarks as
        # handled so the overall push result reflects it
        if pushop.bkresult is not None:
            pushop.bkresult = 1

    return handlereply
1265
1270
1266
1271
@b2partsgenerator(b'pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    if not pushop.pushvars:
        return

    # parse KEY=VALUE strings into a mapping, rejecting malformed entries
    parsed = {}
    for raw in pushop.pushvars:
        if b'=' not in raw:
            msg = (
                b"unable to parse variable '%s', should follow "
                b"'KEY=VALUE' or 'KEY=' format"
            )
            raise error.Abort(msg % raw)
        name, value = raw.split(b'=', 1)
        parsed[name] = value

    part = bundler.newpart(b'pushvars')
    for name, value in pycompat.iteritems(parsed):
        part.addparam(name, value, mandatory=False)
1287
1292
1288
1293
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # pushback lets the server send data back (requires a transaction)
    pushback = pushop.trmanager and pushop.ui.configbool(
        b'experimental', b'bundle2.pushback'
    )

    # create reply capability
    capsblob = bundle2.encodecaps(
        bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
    )
    bundler.newpart(b'replycaps', data=capsblob)
    replyhandlers = []
    # let every registered part generator add its parts; a generator may
    # return a callable that will later process the server reply
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            with pushop.remote.commandexecutor() as e:
                reply = e.callcommand(
                    b'unbundle',
                    {
                        b'bundle': stream,
                        b'heads': [b'force'],
                        b'url': pushop.remote.url(),
                    },
                ).result()
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # the remote aborted while processing one of our parts; relay
            # its message (and hint, if any) to the user
            pushop.ui.status(_(b'remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
            raise error.Abort(_(b'push failed on remote'))
    except error.PushkeyFailed as exc:
        # give the part generator that created this pushkey part a chance
        # to turn the failure into a meaningful error message
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
1346
1351
1347
1352
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Non-bundle2 path: builds a version '01' changegroup and sends it with
    the legacy ``unbundle`` command.  The result lands in
    ``pushop.cgresult``.
    """
    if b'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable(b'unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (
        outgoing.excluded or pushop.repo.changelog.filteredrevs
    ):
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(
            pushop.repo,
            outgoing,
            b'01',
            b'push',
            fastpath=True,
            bundlecaps=bundlecaps,
        )
    else:
        cg = changegroup.makechangegroup(
            pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
        )

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = [b'force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1394
1399
1395
1400
def _pushsyncphase(pushop):
    """synchronise phase information locally and remotely

    Applies the remote's phase data to the local repo, then advertises
    local phase changes to the remote (pushkey fallback path).
    """
    cheads = pushop.commonheads
    # even when we don't push, exchanging phase data is useful
    remotephases = listkeys(pushop.remote, b'phases')
    if (
        pushop.ui.configbool(b'ui', b'_usedassubrepo')
        and remotephases  # server supports phases
        and pushop.cgresult is None  # nothing was pushed
        and remotephases.get(b'publishing', False)
    ):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset was pushed
        # - and remote is publishing
        # We may be in issue 3871 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        remotephases = {b'publishing': b'True'}
    if not remotephases:  # old server or public only reply from non-publishing
        _localphasemove(pushop, cheads)
        # don't push any phase data as there is nothing to push
    else:
        ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
        pheads, droots = ana
        ### Apply remote phase on local
        if remotephases.get(b'publishing', False):
            # publishing server: every common head becomes public locally
            _localphasemove(pushop, cheads)
        else:  # publish = False
            _localphasemove(pushop, pheads)
            _localphasemove(pushop, cheads, phases.draft)
        ### Apply local phase on remote

        if pushop.cgresult:
            if b'phases' in pushop.stepsdone:
                # phases already pushed though bundle2
                return
            outdated = pushop.outdatedphases
        else:
            outdated = pushop.fallbackoutdatedphases

        pushop.stepsdone.add(b'phases')

        # filter heads already turned public by the push
        outdated = [c for c in outdated if c.node() not in pheads]
        # fallback to independent pushkey command
        for newremotehead in outdated:
            with pushop.remote.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': b'phases',
                        b'key': newremotehead.hex(),
                        b'old': b'%d' % phases.draft,
                        b'new': b'%d' % phases.public,
                    },
                ).result()

            if not r:
                pushop.ui.warn(
                    _(b'updating %s to public failed!\n') % newremotehead
                )
1460
1465
1461
1466
def _localphasemove(pushop, nodes, phase=phases.public):
    """move <nodes> to <phase> in the local source repo"""
    if pushop.trmanager:
        # a transaction is available: actually advance the phase boundary
        phases.advanceboundary(
            pushop.repo, pushop.trmanager.transaction(), phase, nodes
        )
        return
    # repo is not locked, do not change any phases!
    # Informs the user that phases should have been moved when
    # applicable.
    repo = pushop.repo
    wouldmove = [n for n in nodes if phase < repo[n].phase()]
    if wouldmove:
        pushop.ui.status(
            _(
                b'cannot lock source repo, skipping '
                b'local %s phase update\n'
            )
            % phases.phasenames[phase]
        )
1482
1487
1483
1488
def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote"""
    if b'obsmarkers' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'obsmarkers')
    if not pushop.outobsmarkers:
        return
    pushop.ui.debug(b'try to push obsolete markers to remote\n')
    markers = obsutil.sortedmarkers(pushop.outobsmarkers)
    keydata = obsolete._pushkeyescape(markers)
    results = []
    # reverse sort to ensure we end with dump0
    for key in sorted(keydata, reverse=True):
        results.append(
            pushop.remote.pushkey(b'obsolete', key, b'', keydata[key])
        )
    if not all(results):
        pushop.repo.ui.warn(_(b'failed to push some obsolete markers!\n'))
1503
1508
1504
1509
def _pushbookmark(pushop):
    """Update bookmark position on remote

    Legacy (non-bundle2) path: one ``pushkey`` command per outgoing
    bookmark.  Skipped entirely when the changegroup push failed.
    """
    if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'bookmarks')
    ui = pushop.ui
    remote = pushop.remote

    for b, old, new in pushop.outbookmarks:
        # classify the update so success/failure messages match the action
        action = b'update'
        if not old:
            action = b'export'
        elif not new:
            action = b'delete'

        with remote.commandexecutor() as e:
            r = e.callcommand(
                b'pushkey',
                {
                    b'namespace': b'bookmarks',
                    b'key': b,
                    b'old': hex(old),
                    b'new': hex(new),
                },
            ).result()

        if r:
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
        # discovery can have set the value form invalid entry
        if pushop.bkresult is not None:
            pushop.bkresult = 1
1538
1543
1539
1544
class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new instance should be created at the beginning of each pull and
    discarded afterward.
    """

    def __init__(
        self,
        repo,
        remote,
        heads=None,
        force=False,
        bookmarks=(),
        remotebookmarks=None,
        streamclonerequested=None,
        includepats=None,
        excludepats=None,
        depth=None,
    ):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly (expanded to full names up front so the
        # rest of the pull machinery only deals with canonical bookmark names)
        self.explicitbookmarks = [
            repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
        ]
        # do we force pull?
        self.force = force
        # whether a streaming clone was requested
        self.streamclonerequested = streamclonerequested
        # transaction manager (set by pull() once locks are held)
        self.trmanager = None
        # set of common changeset between local and remote before pull
        self.common = None
        # set of pulled head
        self.rheads = None
        # list of missing changeset to fetch remotely
        self.fetch = None
        # remote bookmarks data
        self.remotebookmarks = remotebookmarks
        # result of changegroup pulling (used as return code by pull)
        self.cgresult = None
        # list of step already done
        self.stepsdone = set()
        # Whether we attempted a clone from pre-generated bundles.
        self.clonebundleattempted = False
        # Set of file patterns to include.
        self.includepats = includepats
        # Set of file patterns to exclude.
        self.excludepats = excludepats
        # Number of ancestor changesets to pull from each pulled head.
        self.depth = depth

    @util.propertycache
    def pulledsubset(self):
        """heads of the set of changesets targeted by the pull"""
        # compute target subset
        if self.heads is None:
            # We pulled every thing possible
            # sync on everything common
            c = set(self.common)
            ret = list(self.common)
            for n in self.rheads:
                if n not in c:
                    ret.append(n)
            return ret
        else:
            # We pulled a specific subset
            # sync on this subset
            return self.heads

    @util.propertycache
    def canusebundle2(self):
        # cached: bundle2 is usable unless configuration/capabilities force
        # the legacy bundle1 exchange
        return not _forcebundle1(self)

    @util.propertycache
    def remotebundle2caps(self):
        # cached bundle2 capabilities advertised by the remote peer
        return bundle2.bundle2caps(self.remote)

    def gettransaction(self):
        # deprecated; talk to trmanager directly
        return self.trmanager.transaction()
1628
1633
1629
1634
class transactionmanager(util.transactional):
    """Manage the life cycle of a repository transaction.

    The transaction is created lazily, on first use, and the appropriate
    hooks are invoked when it is closed."""

    def __init__(self, repo, source, url):
        self.repo = repo
        self.source = source
        self.url = url
        self._tr = None

    def transaction(self):
        """Return an open transaction object, constructing if necessary"""
        if self._tr is None:
            # the transaction name embeds both the operation source and the
            # (password-stripped) remote URL for hooks and debugging
            trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
            tr = self.repo.transaction(trname)
            tr.hookargs[b'source'] = self.source
            tr.hookargs[b'url'] = self.url
            self._tr = tr
        return self._tr

    def close(self):
        """close transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.close()

    def release(self):
        """release transaction if created"""
        tr = self._tr
        if tr is not None:
            tr.release()
1660
1665
1661
1666
def listkeys(remote, namespace):
    """Fetch the pushkey listing for ``namespace`` from ``remote``."""
    with remote.commandexecutor() as executor:
        cmd = executor.callcommand(b'listkeys', {b'namespace': namespace})
        return cmd.result()
1665
1670
1666
1671
def _fullpullbundle2(repo, pullop):
    """Repeatedly pull bundle2 data until the pull is complete.

    The server may send a partial reply, i.e. when inlining
    pre-computed bundles. In that case, update the common
    set based on the results and pull another bundle.

    There are two indicators that the process is finished:
    - no changeset has been added, or
    - all remote heads are known locally.
    The head check must use the unfiltered view as obsoletion
    markers can hide heads.
    """
    unfi = repo.unfiltered()
    unficl = unfi.changelog

    def headsofdiff(h1, h2):
        """Returns heads(h1 % h2)"""
        res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
        return {ctx.node() for ctx in res}

    def headsofunion(h1, h2):
        """Returns heads((h1 + h2) - null)"""
        res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
        return {ctx.node() for ctx in res}

    while True:
        old_heads = unficl.heads()
        clstart = len(unficl)
        _pullbundle2(pullop)
        if repository.NARROW_REQUIREMENT in repo.requirements:
            # XXX narrow clones filter the heads on the server side during
            # XXX getbundle and result in partial replies as well.
            # XXX Disable pull bundles in this case as band aid to avoid
            # XXX extra round trips.
            break
        if clstart == len(unficl):
            # no changeset was added by this round: the reply was complete
            break
        if all(unficl.hasnode(n) for n in pullop.rheads):
            # every remote head is now known locally: nothing left to fetch
            break
        # partial reply: fold the newly received heads into the common set
        # and retry with a narrowed request
        new_heads = headsofdiff(unficl.heads(), old_heads)
        pullop.common = headsofunion(new_heads, pullop.common)
        pullop.rheads = set(pullop.rheads) - pullop.common
1707
1712
1708
1713
def add_confirm_callback(repo, pullop):
    """Add a finalize callback to the transaction which shows pull stats
    to the user and asks for confirmation before committing the transaction.

    ``repo`` is the local repository being pulled into; ``pullop`` is the
    in-progress ``pulloperation`` whose transaction manager is used.
    Raises ``error.Abort`` (from the validator, at transaction-close time)
    when the user declines.
    """

    tr = pullop.trmanager.transaction()
    # register the summary output as a validator so it runs before commit
    scmutil.registersummarycallback(
        repo, tr, txnname=b'pull', as_validator=True
    )
    # hold the repo weakly so the transaction does not keep it alive
    reporef = weakref.ref(repo.unfiltered())

    def prompt(tr):
        repo = reporef()
        cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
        if repo.ui.promptchoice(cm):
            # fix: Abort messages are bytes (and translatable) everywhere
            # else in this module; a native str here breaks py3 handling
            raise error.Abort(_(b"user aborted"))

    tr.addvalidator(b'900-pull-prompt', prompt)
1726
1731
1727
1732
def pull(
    repo,
    remote,
    heads=None,
    force=False,
    bookmarks=(),
    opargs=None,
    streamclonerequested=None,
    includepats=None,
    excludepats=None,
    depth=None,
    confirm=None,
):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.
    ``remote`` is a peer instance.
    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.
    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.
    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.
    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.
    ``includepats`` and ``excludepats`` define explicit file patterns to
    include and exclude in storage, respectively. If not defined, narrow
    patterns from the repo instance are used, if available.
    ``depth`` is an integer indicating the DAG depth of history we're
    interested in. If defined, for each revision specified in ``heads``, we
    will fetch up to this many of its ancestors and data associated with them.
    ``confirm`` is a boolean indicating whether the pull should be confirmed
    before committing the transaction. This overrides HGPLAIN.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}

    # We allow the narrow patterns to be passed in explicitly to provide more
    # flexibility for API consumers.
    if includepats or excludepats:
        includepats = includepats or set()
        excludepats = excludepats or set()
    else:
        includepats, excludepats = repo.narrowpats

    # fail early on malformed narrow patterns, before any network traffic
    narrowspec.validatepatterns(includepats)
    narrowspec.validatepatterns(excludepats)

    pullop = pulloperation(
        repo,
        remote,
        heads,
        force,
        bookmarks=bookmarks,
        streamclonerequested=streamclonerequested,
        includepats=includepats,
        excludepats=excludepats,
        depth=depth,
        **pycompat.strkwargs(opargs)
    )

    # refuse to pull from a local peer whose requirements we cannot honor
    peerlocal = pullop.remote.local()
    if peerlocal:
        missing = set(peerlocal.requirements) - pullop.repo.supported
        if missing:
            msg = _(
                b"required features are not"
                b" supported in the destination:"
                b" %s"
            ) % (b', '.join(sorted(missing)))
            raise error.Abort(msg)

    pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
    wlock = util.nullcontextmanager()
    if not bookmod.bookmarksinstore(repo):
        # bookmark files live outside the store here, so the working
        # directory lock is needed in addition to the store lock
        wlock = repo.wlock()
    with wlock, repo.lock(), pullop.trmanager:
        if confirm or (
            repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
        ):
            add_confirm_callback(repo, pullop)

        # Use the modern wire protocol, if available.
        if remote.capable(b'command-changesetdata'):
            exchangev2.pull(pullop)
        else:
            # This should ideally be in _pullbundle2(). However, it needs to run
            # before discovery to avoid extra work.
            _maybeapplyclonebundle(pullop)
            streamclone.maybeperformlegacystreamclone(pullop)
            _pulldiscovery(pullop)
            if pullop.canusebundle2:
                _fullpullbundle2(repo, pullop)
            _pullchangeset(pullop)
            _pullphase(pullop)
            _pullbookmarks(pullop)
            _pullobsolete(pullop)

        # storing remotenames
        if repo.ui.configbool(b'experimental', b'remotenames'):
            logexchange.pullremotenames(repo, remote)

    return pullop
1838
1843
1839
1844
# Ordered list of discovery step names to run before a pull.
pulldiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pulldiscoverymapping = {}


def pulldiscovery(stepname):
    """decorator for function performing discovery before pull

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated function will be added in order (this
    may matter).

    You can only use this decorator for a new step, if you want to wrap a step
    from an extension, change the pulldiscovery dictionary directly."""

    def register(step):
        # a step name may be registered only once through this decorator;
        # wrapping an existing step is done via pulldiscoverymapping directly
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = step
        pulldiscoveryorder.append(stepname)
        return step

    return register
1866
1871
1867
1872
def _pulldiscovery(pullop):
    """Run all discovery steps"""
    # steps run in registration order; see the pulldiscovery() decorator
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1873
1878
1874
1879
@pulldiscovery(b'b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """fetch bookmark data in bundle1 case

    If not using bundle2, we have to fetch bookmarks before changeset
    discovery to reduce the chance and impact of race conditions."""
    # Fetch only when the caller did not supply bookmarks already and the
    # bundle2 exchange will not carry them itself (all known bundle2 servers
    # now support listkeys, but let's be nice with new implementations).
    if pullop.remotebookmarks is None and not (
        pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps
    ):
        raw = listkeys(pullop.remote, b'bookmarks')
        pullop.remotebookmarks = bookmod.unhexlifybookmarks(raw)
1889
1894
1890
1895
@pulldiscovery(b'changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Current handle changeset discovery only, will change handle all discovery
    at some point."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
    )
    has_node = pullop.repo.unfiltered().changelog.index.has_node
    if fetch and rheads:
        # If a remote head is filtered locally, put it back in common.
        #
        # This is a hackish solution to catch most of the "common but locally
        # hidden" situations. We do not perform discovery on the unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but does not include a remote head, we'll not be able to
        # detect it.
        known = set(common)
        for node in rheads:
            if has_node(node) and node not in known:
                common.append(node)
        if set(rheads).issubset(set(common)):
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1922
1927
1923
1928
1924 def _pullbundle2(pullop):
1929 def _pullbundle2(pullop):
1925 """pull data using bundle2
1930 """pull data using bundle2
1926
1931
1927 For now, the only supported data are changegroup."""
1932 For now, the only supported data are changegroup."""
1928 kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}
1933 kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}
1929
1934
1930 # make ui easier to access
1935 # make ui easier to access
1931 ui = pullop.repo.ui
1936 ui = pullop.repo.ui
1932
1937
1933 # At the moment we don't do stream clones over bundle2. If that is
1938 # At the moment we don't do stream clones over bundle2. If that is
1934 # implemented then here's where the check for that will go.
1939 # implemented then here's where the check for that will go.
1935 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1940 streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]
1936
1941
1937 # declare pull perimeters
1942 # declare pull perimeters
1938 kwargs[b'common'] = pullop.common
1943 kwargs[b'common'] = pullop.common
1939 kwargs[b'heads'] = pullop.heads or pullop.rheads
1944 kwargs[b'heads'] = pullop.heads or pullop.rheads
1940
1945
1941 # check server supports narrow and then adding includepats and excludepats
1946 # check server supports narrow and then adding includepats and excludepats
1942 servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
1947 servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
1943 if servernarrow and pullop.includepats:
1948 if servernarrow and pullop.includepats:
1944 kwargs[b'includepats'] = pullop.includepats
1949 kwargs[b'includepats'] = pullop.includepats
1945 if servernarrow and pullop.excludepats:
1950 if servernarrow and pullop.excludepats:
1946 kwargs[b'excludepats'] = pullop.excludepats
1951 kwargs[b'excludepats'] = pullop.excludepats
1947
1952
1948 if streaming:
1953 if streaming:
1949 kwargs[b'cg'] = False
1954 kwargs[b'cg'] = False
1950 kwargs[b'stream'] = True
1955 kwargs[b'stream'] = True
1951 pullop.stepsdone.add(b'changegroup')
1956 pullop.stepsdone.add(b'changegroup')
1952 pullop.stepsdone.add(b'phases')
1957 pullop.stepsdone.add(b'phases')
1953
1958
1954 else:
1959 else:
1955 # pulling changegroup
1960 # pulling changegroup
1956 pullop.stepsdone.add(b'changegroup')
1961 pullop.stepsdone.add(b'changegroup')
1957
1962
1958 kwargs[b'cg'] = pullop.fetch
1963 kwargs[b'cg'] = pullop.fetch
1959
1964
1960 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1965 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1961 hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
1966 hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
1962 if not legacyphase and hasbinaryphase:
1967 if not legacyphase and hasbinaryphase:
1963 kwargs[b'phases'] = True
1968 kwargs[b'phases'] = True
1964 pullop.stepsdone.add(b'phases')
1969 pullop.stepsdone.add(b'phases')
1965
1970
1966 if b'listkeys' in pullop.remotebundle2caps:
1971 if b'listkeys' in pullop.remotebundle2caps:
1967 if b'phases' not in pullop.stepsdone:
1972 if b'phases' not in pullop.stepsdone:
1968 kwargs[b'listkeys'] = [b'phases']
1973 kwargs[b'listkeys'] = [b'phases']
1969
1974
1970 bookmarksrequested = False
1975 bookmarksrequested = False
1971 legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
1976 legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
1972 hasbinarybook = b'bookmarks' in pullop.remotebundle2caps
1977 hasbinarybook = b'bookmarks' in pullop.remotebundle2caps
1973
1978
1974 if pullop.remotebookmarks is not None:
1979 if pullop.remotebookmarks is not None:
1975 pullop.stepsdone.add(b'request-bookmarks')
1980 pullop.stepsdone.add(b'request-bookmarks')
1976
1981
1977 if (
1982 if (
1978 b'request-bookmarks' not in pullop.stepsdone
1983 b'request-bookmarks' not in pullop.stepsdone
1979 and pullop.remotebookmarks is None
1984 and pullop.remotebookmarks is None
1980 and not legacybookmark
1985 and not legacybookmark
1981 and hasbinarybook
1986 and hasbinarybook
1982 ):
1987 ):
1983 kwargs[b'bookmarks'] = True
1988 kwargs[b'bookmarks'] = True
1984 bookmarksrequested = True
1989 bookmarksrequested = True
1985
1990
1986 if b'listkeys' in pullop.remotebundle2caps:
1991 if b'listkeys' in pullop.remotebundle2caps:
1987 if b'request-bookmarks' not in pullop.stepsdone:
1992 if b'request-bookmarks' not in pullop.stepsdone:
1988 # make sure to always includes bookmark data when migrating
1993 # make sure to always includes bookmark data when migrating
1989 # `hg incoming --bundle` to using this function.
1994 # `hg incoming --bundle` to using this function.
1990 pullop.stepsdone.add(b'request-bookmarks')
1995 pullop.stepsdone.add(b'request-bookmarks')
1991 kwargs.setdefault(b'listkeys', []).append(b'bookmarks')
1996 kwargs.setdefault(b'listkeys', []).append(b'bookmarks')
1992
1997
1993 # If this is a full pull / clone and the server supports the clone bundles
1998 # If this is a full pull / clone and the server supports the clone bundles
1994 # feature, tell the server whether we attempted a clone bundle. The
1999 # feature, tell the server whether we attempted a clone bundle. The
1995 # presence of this flag indicates the client supports clone bundles. This
2000 # presence of this flag indicates the client supports clone bundles. This
1996 # will enable the server to treat clients that support clone bundles
2001 # will enable the server to treat clients that support clone bundles
1997 # differently from those that don't.
2002 # differently from those that don't.
1998 if (
2003 if (
1999 pullop.remote.capable(b'clonebundles')
2004 pullop.remote.capable(b'clonebundles')
2000 and pullop.heads is None
2005 and pullop.heads is None
2001 and list(pullop.common) == [nullid]
2006 and list(pullop.common) == [nullid]
2002 ):
2007 ):
2003 kwargs[b'cbattempted'] = pullop.clonebundleattempted
2008 kwargs[b'cbattempted'] = pullop.clonebundleattempted
2004
2009
2005 if streaming:
2010 if streaming:
2006 pullop.repo.ui.status(_(b'streaming all changes\n'))
2011 pullop.repo.ui.status(_(b'streaming all changes\n'))
2007 elif not pullop.fetch:
2012 elif not pullop.fetch:
2008 pullop.repo.ui.status(_(b"no changes found\n"))
2013 pullop.repo.ui.status(_(b"no changes found\n"))
2009 pullop.cgresult = 0
2014 pullop.cgresult = 0
2010 else:
2015 else:
2011 if pullop.heads is None and list(pullop.common) == [nullid]:
2016 if pullop.heads is None and list(pullop.common) == [nullid]:
2012 pullop.repo.ui.status(_(b"requesting all changes\n"))
2017 pullop.repo.ui.status(_(b"requesting all changes\n"))
2013 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2018 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2014 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
2019 remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
2015 if obsolete.commonversion(remoteversions) is not None:
2020 if obsolete.commonversion(remoteversions) is not None:
2016 kwargs[b'obsmarkers'] = True
2021 kwargs[b'obsmarkers'] = True
2017 pullop.stepsdone.add(b'obsmarkers')
2022 pullop.stepsdone.add(b'obsmarkers')
2018 _pullbundle2extraprepare(pullop, kwargs)
2023 _pullbundle2extraprepare(pullop, kwargs)
2019
2024
2020 with pullop.remote.commandexecutor() as e:
2025 with pullop.remote.commandexecutor() as e:
2021 args = dict(kwargs)
2026 args = dict(kwargs)
2022 args[b'source'] = b'pull'
2027 args[b'source'] = b'pull'
2023 bundle = e.callcommand(b'getbundle', args).result()
2028 bundle = e.callcommand(b'getbundle', args).result()
2024
2029
2025 try:
2030 try:
2026 op = bundle2.bundleoperation(
2031 op = bundle2.bundleoperation(
2027 pullop.repo, pullop.gettransaction, source=b'pull'
2032 pullop.repo, pullop.gettransaction, source=b'pull'
2028 )
2033 )
2029 op.modes[b'bookmarks'] = b'records'
2034 op.modes[b'bookmarks'] = b'records'
2030 bundle2.processbundle(pullop.repo, bundle, op=op)
2035 bundle2.processbundle(pullop.repo, bundle, op=op)
2031 except bundle2.AbortFromPart as exc:
2036 except bundle2.AbortFromPart as exc:
2032 pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
2037 pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
2033 raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
2038 raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
2034 except error.BundleValueError as exc:
2039 except error.BundleValueError as exc:
2035 raise error.Abort(_(b'missing support for %s') % exc)
2040 raise error.Abort(_(b'missing support for %s') % exc)
2036
2041
2037 if pullop.fetch:
2042 if pullop.fetch:
2038 pullop.cgresult = bundle2.combinechangegroupresults(op)
2043 pullop.cgresult = bundle2.combinechangegroupresults(op)
2039
2044
2040 # processing phases change
2045 # processing phases change
2041 for namespace, value in op.records[b'listkeys']:
2046 for namespace, value in op.records[b'listkeys']:
2042 if namespace == b'phases':
2047 if namespace == b'phases':
2043 _pullapplyphases(pullop, value)
2048 _pullapplyphases(pullop, value)
2044
2049
2045 # processing bookmark update
2050 # processing bookmark update
2046 if bookmarksrequested:
2051 if bookmarksrequested:
2047 books = {}
2052 books = {}
2048 for record in op.records[b'bookmarks']:
2053 for record in op.records[b'bookmarks']:
2049 books[record[b'bookmark']] = record[b"node"]
2054 books[record[b'bookmark']] = record[b"node"]
2050 pullop.remotebookmarks = books
2055 pullop.remotebookmarks = books
2051 else:
2056 else:
2052 for namespace, value in op.records[b'listkeys']:
2057 for namespace, value in op.records[b'listkeys']:
2053 if namespace == b'bookmarks':
2058 if namespace == b'bookmarks':
2054 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
2059 pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)
2055
2060
2056 # bookmark data were either already there or pulled in the bundle
2061 # bookmark data were either already there or pulled in the bundle
2057 if pullop.remotebookmarks is not None:
2062 if pullop.remotebookmarks is not None:
2058 _pullbookmarks(pullop)
2063 _pullbookmarks(pullop)
2059
2064
2060
2065
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call

    Extensions may wrap this no-op to mutate ``kwargs`` (the arguments that
    will be sent to the remote ``getbundle`` command) before the request is
    issued on behalf of ``pullop``.
    """
2063
2068
2064
2069
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Legacy (non-bundle2) changegroup pull: negotiates which wire command the
    remote supports (``getbundle``, ``changegroup`` or ``changegroupsubset``),
    fetches the changegroup and applies it, recording the result in
    ``pullop.cgresult``.
    """
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if b'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        # full clone / pull of everything
        pullop.repo.ui.status(_(b"requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable(b'getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle(
            b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
        )
    elif pullop.heads is None:
        # very old server: plain 'changegroup' only supports pulling
        # everything missing, no head restriction
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
            ).result()

    elif not pullop.remote.capable(b'changegroupsubset'):
        # heads were requested but the remote cannot honor a subset request
        raise error.Abort(
            _(
                b"partial pull cannot be done because "
                b"other repository doesn't support "
                b"changegroupsubset."
            )
        )
    else:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroupsubset',
                {
                    b'bases': pullop.fetch,
                    b'heads': pullop.heads,
                    b'source': b'pull',
                },
            ).result()

    bundleop = bundle2.applybundle(
        pullop.repo, cg, tr, b'pull', pullop.remote.url()
    )
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
2118
2123
2119
2124
2120 def _pullphase(pullop):
2125 def _pullphase(pullop):
2121 # Get remote phases data from remote
2126 # Get remote phases data from remote
2122 if b'phases' in pullop.stepsdone:
2127 if b'phases' in pullop.stepsdone:
2123 return
2128 return
2124 remotephases = listkeys(pullop.remote, b'phases')
2129 remotephases = listkeys(pullop.remote, b'phases')
2125 _pullapplyphases(pullop, remotephases)
2130 _pullapplyphases(pullop, remotephases)
2126
2131
2127
2132
def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state

    ``remotephases`` is the pushkey-style mapping returned by the remote's
    ``phases`` namespace. Depending on whether the remote is publishing, the
    pulled subset is advanced to public and/or draft locally. A transaction is
    only opened if there is actually a boundary to move.
    """
    if b'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'phases')
    publishing = bool(remotephases.get(b'publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(
            pullop.repo, pullop.pulledsubset, remotephases
        )
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    # use the unfiltered repo so phase lookups see all revisions
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.index.get_rev
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)
2162
2167
2163
2168
2164 def _pullbookmarks(pullop):
2169 def _pullbookmarks(pullop):
2165 """process the remote bookmark information to update the local one"""
2170 """process the remote bookmark information to update the local one"""
2166 if b'bookmarks' in pullop.stepsdone:
2171 if b'bookmarks' in pullop.stepsdone:
2167 return
2172 return
2168 pullop.stepsdone.add(b'bookmarks')
2173 pullop.stepsdone.add(b'bookmarks')
2169 repo = pullop.repo
2174 repo = pullop.repo
2170 remotebookmarks = pullop.remotebookmarks
2175 remotebookmarks = pullop.remotebookmarks
2171 bookmod.updatefromremote(
2176 bookmod.updatefromremote(
2172 repo.ui,
2177 repo.ui,
2173 repo,
2178 repo,
2174 remotebookmarks,
2179 remotebookmarks,
2175 pullop.remote.url(),
2180 pullop.remote.url(),
2176 pullop.gettransaction,
2181 pullop.gettransaction,
2177 explicit=pullop.explicitbookmarks,
2182 explicit=pullop.explicitbookmarks,
2178 )
2183 )
2179
2184
2180
2185
def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` is function that return the pull transaction, creating
    one if necessary. We return the transaction to inform the calling code that
    a new transaction have been created (when applicable).

    Exists mostly to allow overriding for experimentation purpose"""
    if b'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
        remoteobs = listkeys(pullop.remote, b'obsolete')
        # markers are shipped as base85-encoded chunks under 'dump0', 'dump1'…
        if b'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            # iterate in reverse-sorted key order; every 'dump*' key holds
            # one encoded batch of markers
            for key in sorted(remoteobs, reverse=True):
                if key.startswith(b'dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            # obsstore content changed: drop cached volatile sets
            pullop.repo.invalidatevolatilesets()
    return tr
2208
2213
2209
2214
def applynarrowacl(repo, kwargs):
    """Apply narrow fetch access control.

    This massages the named arguments for getbundle wire protocol commands
    so requested data is filtered through access control rules.

    Returns a new kwargs dict with ``narrow``/``narrow_acl`` set and the
    include/exclude patterns restricted to what the requesting user may see.
    Raises ``error.Abort`` when the user has no configured includes or
    requested patterns outside the allowed set.
    """
    ui = repo.ui
    # TODO this assumes existence of HTTP and is a layering violation.
    username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
    # per-user config with a 'default.*' fallback
    user_includes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.includes',
        ui.configlist(_NARROWACL_SECTION, b'default.includes'),
    )
    user_excludes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.excludes',
        ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
    )
    if not user_includes:
        raise error.Abort(
            _(b"%s configuration for user %s is empty")
            % (_NARROWACL_SECTION, username)
        )

    # normalize config patterns: '*' means the whole repo ('path:.')
    user_includes = [
        b'path:.' if p == b'*' else b'path:' + p for p in user_includes
    ]
    user_excludes = [
        b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
    ]

    req_includes = set(kwargs.get('includepats', []))
    req_excludes = set(kwargs.get('excludepats', []))

    req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
        req_includes, req_excludes, user_includes, user_excludes
    )

    if invalid_includes:
        raise error.Abort(
            _(b"The following includes are not accessible for %s: %s")
            % (username, stringutil.pprint(invalid_includes))
        )

    # build the filtered argument dict without mutating the caller's kwargs
    new_args = {}
    new_args.update(kwargs)
    new_args['narrow'] = True
    new_args['narrow_acl'] = True
    new_args['includepats'] = req_includes
    if req_excludes:
        new_args['excludepats'] = req_excludes

    return new_args
2264
2269
2265
2270
def _computeellipsis(repo, common, heads, known, match, depth=None):
    """Compute the shape of a narrowed DAG.

    Args:
      repo: The repository we're transferring.
      common: The roots of the DAG range we're transferring.
          May be just [nullid], which means all ancestors of heads.
      heads: The heads of the DAG range we're transferring.
      known: Set of revs the client already knows as full nodes; these are
          kept in ``required`` so their ellipsis linkage is preserved.
      match: The narrowmatcher that allows us to identify relevant changes.
      depth: If not None, only consider nodes to be full nodes if they are at
          most depth changesets away from one of heads.

    Returns:
      A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:

        visitnodes: The list of nodes (either full or ellipsis) which
            need to be sent to the client.
        relevant_nodes: The set of changelog nodes which change a file inside
            the narrowspec. The client needs these as non-ellipsis nodes.
        ellipsisroots: A dict of {rev: parents} that is used in
            narrowchangegroup to produce ellipsis nodes with the
            correct parents.
    """
    cl = repo.changelog
    mfl = repo.manifestlog

    clrev = cl.rev

    commonrevs = {clrev(n) for n in common} | {nullrev}
    headsrevs = {clrev(n) for n in heads}

    if depth:
        # distance (in changesets) from the nearest requested head
        revdepth = {h: 0 for h in headsrevs}

    # rev -> set of ellipsis heads reachable from it / roots under it
    ellipsisheads = collections.defaultdict(set)
    ellipsisroots = collections.defaultdict(set)

    def addroot(head, curchange):
        """Add a root to an ellipsis head, splitting heads with 3 roots."""
        ellipsisroots[head].add(curchange)
        # Recursively split ellipsis heads with 3 roots by finding the
        # roots' youngest common descendant which is an elided merge commit.
        # That descendant takes 2 of the 3 roots as its own, and becomes a
        # root of the head.
        while len(ellipsisroots[head]) > 2:
            child, roots = splithead(head)
            splitroots(head, child, roots)
            head = child  # Recurse in case we just added a 3rd root

    def splitroots(head, child, roots):
        # move ``roots`` from ``head`` onto the intermediate ``child``
        ellipsisroots[head].difference_update(roots)
        ellipsisroots[head].add(child)
        ellipsisroots[child].update(roots)
        ellipsisroots[child].discard(child)

    def splithead(head):
        # find an elided merge commit between two of the three roots and head
        r1, r2, r3 = sorted(ellipsisroots[head])
        for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
            mid = repo.revs(
                b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
            )
            for j in mid:
                if j == nr2:
                    return nr2, (nr1, nr2)
                if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
                    return j, (nr1, nr2)
        raise error.Abort(
            _(
                b'Failed to split up ellipsis node! head: %d, '
                b'roots: %d %d %d'
            )
            % (head, r1, r2, r3)
        )

    missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
    # walk from newest to oldest so depth propagates parent-ward
    visit = reversed(missing)
    relevant_nodes = set()
    visitnodes = [cl.node(m) for m in missing]
    required = set(headsrevs) | known
    for rev in visit:
        # NOTE: 'clrev' is rebound here (a changelogrevision object),
        # shadowing the cl.rev alias bound above — the alias is no longer
        # needed at this point.
        clrev = cl.changelogrevision(rev)
        ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
        if depth is not None:
            curdepth = revdepth[rev]
            for p in ps:
                revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
        needed = False
        shallow_enough = depth is None or revdepth[rev] <= depth
        if shallow_enough:
            curmf = mfl[clrev.manifest].read()
            if ps:
                # We choose to not trust the changed files list in
                # changesets because it's not always correct. TODO: could
                # we trust it for the non-merge case?
                p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
                needed = bool(curmf.diff(p1mf, match))
                if not needed and len(ps) > 1:
                    # For merge changes, the list of changed files is not
                    # helpful, since we need to emit the merge if a file
                    # in the narrow spec has changed on either side of the
                    # merge. As a result, we do a manifest diff to check.
                    p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
                    needed = bool(curmf.diff(p2mf, match))
            else:
                # For a root node, we need to include the node if any
                # files in the node match the narrowspec.
                needed = any(curmf.walk(match))

        if needed:
            # full node: it becomes a root for pending ellipsis heads and
            # its parents become required
            for head in ellipsisheads[rev]:
                addroot(head, rev)
            for p in ps:
                required.add(p)
            relevant_nodes.add(cl.node(rev))
        else:
            if not ps:
                ps = [nullrev]
            if rev in required:
                # required but not relevant: emitted as an ellipsis node
                for head in ellipsisheads[rev]:
                    addroot(head, rev)
                for p in ps:
                    ellipsisheads[p].add(rev)
            else:
                # fully elided: propagate its pending heads to its parents
                for p in ps:
                    ellipsisheads[p] |= ellipsisheads[rev]

    # add common changesets as roots of their reachable ellipsis heads
    for c in commonrevs:
        for head in ellipsisheads[c]:
            addroot(head, c)
    return visitnodes, relevant_nodes, ellipsisroots
2397
2402
2398
2403
def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    # Advertise bundle2 support plus this repo's URL-quoted capability blob.
    blob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    return {b'HG20', b'bundle2=' + urlreq.quote(blob)}
2405
2410
2406
2411
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
# (populated by the @getbundle2partsgenerator decorator below)
getbundle2partsmapping = {}
2414
2419
2415
2420
def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The decorated function is recorded in the step -> function mapping and
    appended to the list of steps. Beware that decorated functions will be
    added in order (this may matter).

    You can only use this decorator for new steps; to wrap a step from an
    extension, modify the getbundle2partsmapping dictionary directly."""

    def register(func):
        # each step name may only be registered once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is not None:
            getbundle2partsorder.insert(idx, stepname)
        else:
            getbundle2partsorder.append(stepname)
        return func

    return register
2436
2441
2437
2442
def bundle2requested(bundlecaps):
    """Return True if the client capabilities include a bundle2 marker.

    ``bundlecaps`` may be None (no capabilities advertised at all), in which
    case bundle2 is not requested.
    """
    if bundlecaps is None:
        return False
    for capability in bundlecaps:
        if capability.startswith(b'HG2'):
            return True
    return False
2442
2447
2443
2448
def getbundlechunks(
    repo, source, heads=None, common=None, bundlecaps=None, **kwargs
):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        # bundle10 can only carry a changegroup and nothing else
        if bundlecaps and not kwargs.get(b'cg', True):
            raise ValueError(
                _(b'request for bundle10 must include changegroup')
            )

        if kwargs:
            raise ValueError(
                _(b'unsupported getbundle arguments: %s')
                % b', '.join(sorted(kwargs.keys()))
            )
        outgoing = _computeoutgoing(repo, heads, common)
        info[b'bundleversion'] = 1
        return (
            info,
            changegroup.makestream(
                repo, outgoing, b'01', source, bundlecaps=bundlecaps
            ),
        )

    # bundle20 case
    info[b'bundleversion'] = 2
    # decode the client's URL-quoted bundle2 capability blob(s)
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith(b'bundle2='):
            blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs[b'heads'] = heads
    kwargs[b'common'] = common

    # run every registered part generator, in registration order
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(
            bundler,
            repo,
            source,
            bundlecaps=bundlecaps,
            b2caps=b2caps,
            **pycompat.strkwargs(kwargs)
        )

    info[b'prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()
2505
2510
2506
2511
@getbundle2partsgenerator(b'stream2')
def _getbundlestream2(bundler, repo, *args, **kwargs):
    """Delegate 'stream2' part generation to bundle2.addpartbundlestream2()."""
    part = bundle2.addpartbundlestream2(bundler, repo, **kwargs)
    return part
2510
2515
2511
2516
@getbundle2partsgenerator(b'changegroup')
def _getbundlechangegrouppart(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """add a changegroup part to the requested bundle"""
    # Nothing to do when the client opted out of changegroups ('cg' False)
    # or advertised no bundle2 capabilities at all.
    if not kwargs.get('cg', True) or not b2caps:
        return

    version = b'01'
    cgversions = b2caps.get(b'changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        # Intersect what the client supports with what this repo can emit,
        # then pick the highest common changegroup version.
        cgversions = [
            v
            for v in cgversions
            if v in changegroup.supportedoutgoingversions(repo)
        ]
        if not cgversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(cgversions)

    outgoing = _computeoutgoing(repo, heads, common)
    if not outgoing.missing:
        # Nothing to transfer; skip emitting an empty changegroup part.
        return

    if kwargs.get('narrow', False):
        # Narrow pull: restrict file contents with a matcher built from the
        # requested include/exclude patterns (empty patterns filtered out).
        include = sorted(filter(bool, kwargs.get('includepats', [])))
        exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
        matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
    else:
        matcher = None

    cgstream = changegroup.makestream(
        repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
    )

    part = bundler.newpart(b'changegroup', data=cgstream)
    if cgversions:
        part.addparam(b'version', version)

    # Advisory hint so receivers can show progress; safe to ignore.
    part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)

    if b'treemanifest' in repo.requirements:
        part.addparam(b'treemanifest', b'1')

    if b'exp-sidedata-flag' in repo.requirements:
        part.addparam(b'exp-sidedata', b'1')

    # `include`/`exclude` are only bound in the narrow branch above; the
    # short-circuit on kwargs['narrow'] keeps this safe.
    if (
        kwargs.get('narrow', False)
        and kwargs.get('narrow_acl', False)
        and (include or exclude)
    ):
        # this is mandatory because otherwise ACL clients won't work
        narrowspecpart = bundler.newpart(b'Narrow:responsespec')
        narrowspecpart.data = b'%s\0%s' % (
            b'\n'.join(include),
            b'\n'.join(exclude),
        )
2577
2582
2578
2583
@getbundle2partsgenerator(b'bookmarks')
def _getbundlebookmarkpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add a bookmark part to the requested bundle"""
    if not kwargs.get('bookmarks', False):
        return
    if not b2caps or b'bookmarks' not in b2caps:
        raise error.Abort(_(b'no common bookmarks exchange method'))
    # Encode all bookmarks in the binary wire format; only emit a part
    # when there is something to send.
    encoded = bookmod.binaryencode(bookmod.listbinbookmarks(repo))
    if encoded:
        bundler.newpart(b'bookmarks', data=encoded)
2592
2597
2593
2598
@getbundle2partsgenerator(b'listkeys')
def _getbundlelistkeysparts(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add parts containing listkeys namespaces to the requested bundle"""
    # Emit one 'listkeys' part per requested pushkey namespace.
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart(b'listkeys')
        part.addparam(b'namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
2605
2610
2606
2611
@getbundle2partsgenerator(b'obsmarkers')
def _getbundleobsmarkerpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # Collect markers relevant to everything reachable from the requested
    # heads, in a stable order.
    subset = [c.node() for c in repo.set(b'::%ln', heads)]
    markers = obsutil.sortedmarkers(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
2619
2624
2620
2625
@getbundle2partsgenerator(b'phases')
def _getbundlephasespart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add phase heads part to the requested bundle

    For each phase, the heads (among the requested ``heads``) belonging to
    that phase are binary-encoded into a ``phase-heads`` part so the client
    can update phases without receiving full per-changeset phase data.
    """
    if kwargs.get('phases', False):
        # Default to () so a client that advertises bundle2 capabilities but
        # lacks a 'phases' capability gets the intended Abort rather than a
        # TypeError from `b'heads' in None`.
        if not b2caps or b'heads' not in b2caps.get(b'phases', ()):
            raise error.Abort(_(b'no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            # On a publishing repository everything served is public.
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phase for now)
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = b'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        phasemapping = {
            phase: sorted(headsbyphase[phase]) for phase in phases.allphases
        }

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart(b'phase-heads', data=phasedata)
2668
2673
2669
2674
@getbundle2partsgenerator(b'hgtagsfnodes')
def _getbundletagsfnodes(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Only send when changesets are being exchanged and the client
    # advertises support for the 'hgtagsfnodes' part.
    if b2caps and kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps:
        outgoing = _computeoutgoing(repo, heads, common)
        bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2696
2701
2697
2702
@getbundle2partsgenerator(b'cache:rev-branch-cache')
def _getbundlerevbranchcache(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """Transfer the rev-branch-cache mapping

    The payload is a series of data related to each branch

    1) branch name length
    2) number of open heads
    3) number of closed heads
    4) open heads nodes
    5) closed heads nodes
    """
    # Send only when all of the following hold:
    # - changesets are being exchanged,
    # - the client supports the part,
    # - narrow bundle isn't in play (not currently compatible).
    sendable = (
        kwargs.get('cg', True)
        and b2caps
        and b'rev-branch-cache' in b2caps
        and not kwargs.get('narrow', False)
        and not repo.ui.has_section(_NARROWACL_SECTION)
    )
    if not sendable:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2734
2739
2735
2740
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    current = repo.heads()
    digest = hashutil.sha1(b''.join(sorted(current))).digest()
    # The peer may send the literal head list, a sha1 of it, or b'force'
    # to bypass the race check entirely.
    unchanged = (
        their_heads == [b'force']
        or their_heads == current
        or their_heads == [b'hashed', digest]
    )
    if not unchanged:
        # someone else committed/pushed/unbundled while we
        # were transferring data
        raise error.PushRaced(
            b'repository changed while %s - please try again' % context
        )
2753
2758
2754
2759
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and have
    mechanism to check that no push race occurred between the creation of the
    bundle and its application.

    If the push was raced as PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool(
        b'experimental', b'bundle2-output-capture'
    )
    if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call wil be a no-op
        check_heads(repo, heads, b'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            # The whole application happens under one lock + transaction;
            # the context managers release/close them in all cases.
            txnname = b"\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                # Lazily create the transaction (and the locks protecting
                # it) the first time a bundle2 part actually needs one.
                def gettransaction():
                    if not lockandtr[2]:
                        if not bookmod.bookmarksinstore(repo):
                            lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs[b'source'] = source
                        lockandtr[2].hookargs[b'url'] = url
                        lockandtr[2].hookargs[b'bundle2'] = b'1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool(
                    b'experimental', b'bundle2lazylocking'
                ):
                    gettransaction()

                op = bundle2.bundleoperation(
                    repo,
                    gettransaction,
                    captureoutput=captureoutput,
                    source=b'push',
                )
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    # Capture the reply even when processing failed so any
                    # buffered server output can be forwarded to the client.
                    r = op.reply
                    if captureoutput and r is not None:
                        repo.ui.pushbuffer(error=True, subproc=True)

                        def recordout(output):
                            r.newpart(b'output', data=output, mandatory=False)

                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                # Tag the exception so callers know it happened during
                # bundle2 processing, and salvage any reply output.
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()

                    def recordout(output):
                        part = bundle2.bundlepart(
                            b'output', data=output, mandatory=False
                        )
                        parts.append(part)

                raise
    finally:
        # Release in reverse acquisition order: transaction, lock, wlock.
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
2842
2847
2843
2848
def _maybeapplyclonebundle(pullop):
    """Apply a clone bundle from a remote, if possible."""

    repo = pullop.repo
    remote = pullop.remote

    # Feature can be disabled client-side via ui.clonebundles.
    if not repo.ui.configbool(b'ui', b'clonebundles'):
        return

    # Only run if local repo is empty.
    if len(repo):
        return

    # A pull of specific heads is not a clone; skip.
    if pullop.heads:
        return

    if not remote.capable(b'clonebundles'):
        return

    with remote.commandexecutor() as e:
        res = e.callcommand(b'clonebundles', {}).result()

    # If we call the wire protocol command, that's good enough to record the
    # attempt.
    pullop.clonebundleattempted = True

    entries = parseclonebundlesmanifest(repo, res)
    if not entries:
        repo.ui.note(
            _(
                b'no clone bundles available on remote; '
                b'falling back to regular clone\n'
            )
        )
        return

    # Drop manifest entries this client cannot apply (unsupported spec,
    # missing SNI, stream-clone mismatch, ...).
    entries = filterclonebundleentries(
        repo, entries, streamclonerequested=pullop.streamclonerequested
    )

    if not entries:
        # There is a thundering herd concern here. However, if a server
        # operator doesn't advertise bundles appropriate for its clients,
        # they deserve what's coming. Furthermore, from a client's
        # perspective, no automatic fallback would mean not being able to
        # clone!
        repo.ui.warn(
            _(
                b'no compatible clone bundles available on server; '
                b'falling back to regular clone\n'
            )
        )
        repo.ui.warn(
            _(b'(you may want to report this to the server operator)\n')
        )
        return

    entries = sortclonebundleentries(repo.ui, entries)

    # Best candidate first after sorting; only the first entry is tried.
    url = entries[0][b'URL']
    repo.ui.status(_(b'applying clone bundle from %s\n') % url)
    if trypullbundlefromurl(repo.ui, repo, url):
        repo.ui.status(_(b'finished applying clone bundle\n'))
    # Bundle failed.
    #
    # We abort by default to avoid the thundering herd of
    # clients flooding a server that was expecting expensive
    # clone load to be offloaded.
    elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
        repo.ui.warn(_(b'falling back to normal clone\n'))
    else:
        raise error.Abort(
            _(b'error applying bundle'),
            hint=_(
                b'if this error persists, consider contacting '
                b'the server operator or disable clone '
                b'bundles via '
                b'"--config ui.clonebundles=false"'
            ),
        )
2924
2929
2925
2930
def parseclonebundlesmanifest(repo, s):
    """Parses the raw text of a clone bundles manifest.

    Returns a list of dicts. The dicts have a ``URL`` key corresponding
    to the URL and other keys are the attributes for the entry.
    """
    entries = []
    for line in s.splitlines():
        fields = line.split()
        if not fields:
            continue
        # First field is always the URL; the rest are key=value attributes
        # (both halves URL-quoted).
        attrs = {b'URL': fields[0]}
        for rawattr in fields[1:]:
            key, value = rawattr.split(b'=', 1)
            key = urlreq.unquote(key)
            value = urlreq.unquote(value)
            attrs[key] = value

            # Parse BUNDLESPEC into components. This makes client-side
            # preferences easier to specify since you can prefer a single
            # component of the BUNDLESPEC.
            if key == b'BUNDLESPEC':
                try:
                    bundlespec = parsebundlespec(repo, value)
                    attrs[b'COMPRESSION'] = bundlespec.compression
                    attrs[b'VERSION'] = bundlespec.version
                except (
                    error.InvalidBundleSpecification,
                    error.UnsupportedBundleSpecification,
                ):
                    # Malformed/unknown specs are tolerated; the raw value
                    # stays in attrs without the derived keys.
                    pass

        entries.append(attrs)

    return entries
2960
2965
2961
2966
def isstreamclonespec(bundlespec):
    """Return whether *bundlespec* describes a stream clone (v1 or v2)."""
    # Stream clones are always uncompressed on the wire.
    if bundlespec.wirecompression != b'UN':
        return False

    # Stream clone v1
    if bundlespec.wireversion == b's1':
        return True

    # Stream clone v2
    return bool(
        bundlespec.wireversion == b'02'
        and bundlespec.contentopts.get(b'streamv2')
    )
2976
2981
2977
2982
2978 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2983 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2979 """Remove incompatible clone bundle manifest entries.
2984 """Remove incompatible clone bundle manifest entries.
2980
2985
2981 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2986 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2982 and returns a new list consisting of only the entries that this client
2987 and returns a new list consisting of only the entries that this client
2983 should be able to apply.
2988 should be able to apply.
2984
2989
2985 There is no guarantee we'll be able to apply all returned entries because
2990 There is no guarantee we'll be able to apply all returned entries because
2986 the metadata we use to filter on may be missing or wrong.
2991 the metadata we use to filter on may be missing or wrong.
2987 """
2992 """
2988 newentries = []
2993 newentries = []
2989 for entry in entries:
2994 for entry in entries:
2990 spec = entry.get(b'BUNDLESPEC')
2995 spec = entry.get(b'BUNDLESPEC')
2991 if spec:
2996 if spec:
2992 try:
2997 try:
2993 bundlespec = parsebundlespec(repo, spec, strict=True)
2998 bundlespec = parsebundlespec(repo, spec, strict=True)
2994
2999
2995 # If a stream clone was requested, filter out non-streamclone
3000 # If a stream clone was requested, filter out non-streamclone
2996 # entries.
3001 # entries.
2997 if streamclonerequested and not isstreamclonespec(bundlespec):
3002 if streamclonerequested and not isstreamclonespec(bundlespec):
2998 repo.ui.debug(
3003 repo.ui.debug(
2999 b'filtering %s because not a stream clone\n'
3004 b'filtering %s because not a stream clone\n'
3000 % entry[b'URL']
3005 % entry[b'URL']
3001 )
3006 )
3002 continue
3007 continue
3003
3008
3004 except error.InvalidBundleSpecification as e:
3009 except error.InvalidBundleSpecification as e:
3005 repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
3010 repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
3006 continue
3011 continue
3007 except error.UnsupportedBundleSpecification as e:
3012 except error.UnsupportedBundleSpecification as e:
3008 repo.ui.debug(
3013 repo.ui.debug(
3009 b'filtering %s because unsupported bundle '
3014 b'filtering %s because unsupported bundle '
3010 b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
3015 b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
3011 )
3016 )
3012 continue
3017 continue
3013 # If we don't have a spec and requested a stream clone, we don't know
3018 # If we don't have a spec and requested a stream clone, we don't know
3014 # what the entry is so don't attempt to apply it.
3019 # what the entry is so don't attempt to apply it.
3015 elif streamclonerequested:
3020 elif streamclonerequested:
3016 repo.ui.debug(
3021 repo.ui.debug(
3017 b'filtering %s because cannot determine if a stream '
3022 b'filtering %s because cannot determine if a stream '
3018 b'clone bundle\n' % entry[b'URL']
3023 b'clone bundle\n' % entry[b'URL']
3019 )
3024 )
3020 continue
3025 continue
3021
3026
3022 if b'REQUIRESNI' in entry and not sslutil.hassni:
3027 if b'REQUIRESNI' in entry and not sslutil.hassni:
3023 repo.ui.debug(
3028 repo.ui.debug(
3024 b'filtering %s because SNI not supported\n' % entry[b'URL']
3029 b'filtering %s because SNI not supported\n' % entry[b'URL']
3025 )
3030 )
3026 continue
3031 continue
3027
3032
3028 if b'REQUIREDRAM' in entry:
3033 if b'REQUIREDRAM' in entry:
3029 try:
3034 try:
3030 requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
3035 requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
3031 except error.ParseError:
3036 except error.ParseError:
3032 repo.ui.debug(
3037 repo.ui.debug(
3033 b'filtering %s due to a bad REQUIREDRAM attribute\n'
3038 b'filtering %s due to a bad REQUIREDRAM attribute\n'
3034 % entry[b'URL']
3039 % entry[b'URL']
3035 )
3040 )
3036 continue
3041 continue
3037 actualram = repo.ui.estimatememory()
3042 actualram = repo.ui.estimatememory()
3038 if actualram is not None and actualram * 0.66 < requiredram:
3043 if actualram is not None and actualram * 0.66 < requiredram:
3039 repo.ui.debug(
3044 repo.ui.debug(
3040 b'filtering %s as it needs more than 2/3 of system memory\n'
3045 b'filtering %s as it needs more than 2/3 of system memory\n'
3041 % entry[b'URL']
3046 % entry[b'URL']
3042 )
3047 )
3043 continue
3048 continue
3044
3049
3045 newentries.append(entry)
3050 newentries.append(entry)
3046
3051
3047 return newentries
3052 return newentries
3048
3053
3049
3054
3050 class clonebundleentry(object):
3055 class clonebundleentry(object):
3051 """Represents an item in a clone bundles manifest.
3056 """Represents an item in a clone bundles manifest.
3052
3057
3053 This rich class is needed to support sorting since sorted() in Python 3
3058 This rich class is needed to support sorting since sorted() in Python 3
3054 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
3059 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
3055 won't work.
3060 won't work.
3056 """
3061 """
3057
3062
3058 def __init__(self, value, prefers):
3063 def __init__(self, value, prefers):
3059 self.value = value
3064 self.value = value
3060 self.prefers = prefers
3065 self.prefers = prefers
3061
3066
3062 def _cmp(self, other):
3067 def _cmp(self, other):
3063 for prefkey, prefvalue in self.prefers:
3068 for prefkey, prefvalue in self.prefers:
3064 avalue = self.value.get(prefkey)
3069 avalue = self.value.get(prefkey)
3065 bvalue = other.value.get(prefkey)
3070 bvalue = other.value.get(prefkey)
3066
3071
3067 # Special case for b missing attribute and a matches exactly.
3072 # Special case for b missing attribute and a matches exactly.
3068 if avalue is not None and bvalue is None and avalue == prefvalue:
3073 if avalue is not None and bvalue is None and avalue == prefvalue:
3069 return -1
3074 return -1
3070
3075
3071 # Special case for a missing attribute and b matches exactly.
3076 # Special case for a missing attribute and b matches exactly.
3072 if bvalue is not None and avalue is None and bvalue == prefvalue:
3077 if bvalue is not None and avalue is None and bvalue == prefvalue:
3073 return 1
3078 return 1
3074
3079
3075 # We can't compare unless attribute present on both.
3080 # We can't compare unless attribute present on both.
3076 if avalue is None or bvalue is None:
3081 if avalue is None or bvalue is None:
3077 continue
3082 continue
3078
3083
3079 # Same values should fall back to next attribute.
3084 # Same values should fall back to next attribute.
3080 if avalue == bvalue:
3085 if avalue == bvalue:
3081 continue
3086 continue
3082
3087
3083 # Exact matches come first.
3088 # Exact matches come first.
3084 if avalue == prefvalue:
3089 if avalue == prefvalue:
3085 return -1
3090 return -1
3086 if bvalue == prefvalue:
3091 if bvalue == prefvalue:
3087 return 1
3092 return 1
3088
3093
3089 # Fall back to next attribute.
3094 # Fall back to next attribute.
3090 continue
3095 continue
3091
3096
3092 # If we got here we couldn't sort by attributes and prefers. Fall
3097 # If we got here we couldn't sort by attributes and prefers. Fall
3093 # back to index order.
3098 # back to index order.
3094 return 0
3099 return 0
3095
3100
3096 def __lt__(self, other):
3101 def __lt__(self, other):
3097 return self._cmp(other) < 0
3102 return self._cmp(other) < 0
3098
3103
3099 def __gt__(self, other):
3104 def __gt__(self, other):
3100 return self._cmp(other) > 0
3105 return self._cmp(other) > 0
3101
3106
3102 def __eq__(self, other):
3107 def __eq__(self, other):
3103 return self._cmp(other) == 0
3108 return self._cmp(other) == 0
3104
3109
3105 def __le__(self, other):
3110 def __le__(self, other):
3106 return self._cmp(other) <= 0
3111 return self._cmp(other) <= 0
3107
3112
3108 def __ge__(self, other):
3113 def __ge__(self, other):
3109 return self._cmp(other) >= 0
3114 return self._cmp(other) >= 0
3110
3115
3111 def __ne__(self, other):
3116 def __ne__(self, other):
3112 return self._cmp(other) != 0
3117 return self._cmp(other) != 0
3113
3118
3114
3119
3115 def sortclonebundleentries(ui, entries):
3120 def sortclonebundleentries(ui, entries):
3116 prefers = ui.configlist(b'ui', b'clonebundleprefers')
3121 prefers = ui.configlist(b'ui', b'clonebundleprefers')
3117 if not prefers:
3122 if not prefers:
3118 return list(entries)
3123 return list(entries)
3119
3124
3120 def _split(p):
3125 def _split(p):
3121 if b'=' not in p:
3126 if b'=' not in p:
3122 hint = _(b"each comma separated item should be key=value pairs")
3127 hint = _(b"each comma separated item should be key=value pairs")
3123 raise error.Abort(
3128 raise error.Abort(
3124 _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
3129 _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
3125 )
3130 )
3126 return p.split(b'=', 1)
3131 return p.split(b'=', 1)
3127
3132
3128 prefers = [_split(p) for p in prefers]
3133 prefers = [_split(p) for p in prefers]
3129
3134
3130 items = sorted(clonebundleentry(v, prefers) for v in entries)
3135 items = sorted(clonebundleentry(v, prefers) for v in entries)
3131 return [i.value for i in items]
3136 return [i.value for i in items]
3132
3137
3133
3138
3134 def trypullbundlefromurl(ui, repo, url):
3139 def trypullbundlefromurl(ui, repo, url):
3135 """Attempt to apply a bundle from a URL."""
3140 """Attempt to apply a bundle from a URL."""
3136 with repo.lock(), repo.transaction(b'bundleurl') as tr:
3141 with repo.lock(), repo.transaction(b'bundleurl') as tr:
3137 try:
3142 try:
3138 fh = urlmod.open(ui, url)
3143 fh = urlmod.open(ui, url)
3139 cg = readbundle(ui, fh, b'stream')
3144 cg = readbundle(ui, fh, b'stream')
3140
3145
3141 if isinstance(cg, streamclone.streamcloneapplier):
3146 if isinstance(cg, streamclone.streamcloneapplier):
3142 cg.apply(repo)
3147 cg.apply(repo)
3143 else:
3148 else:
3144 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
3149 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
3145 return True
3150 return True
3146 except urlerr.httperror as e:
3151 except urlerr.httperror as e:
3147 ui.warn(
3152 ui.warn(
3148 _(b'HTTP error fetching bundle: %s\n')
3153 _(b'HTTP error fetching bundle: %s\n')
3149 % stringutil.forcebytestr(e)
3154 % stringutil.forcebytestr(e)
3150 )
3155 )
3151 except urlerr.urlerror as e:
3156 except urlerr.urlerror as e:
3152 ui.warn(
3157 ui.warn(
3153 _(b'error fetching bundle: %s\n')
3158 _(b'error fetching bundle: %s\n')
3154 % stringutil.forcebytestr(e.reason)
3159 % stringutil.forcebytestr(e.reason)
3155 )
3160 )
3156
3161
3157 return False
3162 return False
@@ -1,798 +1,800 b''
1 Test file dedicated to testing the divergent troubles from obsolete changeset.
1 Test file dedicated to testing the divergent troubles from obsolete changeset.
2
2
3 This is the most complex troubles from far so we isolate it in a dedicated
3 This is the most complex troubles from far so we isolate it in a dedicated
4 file.
4 file.
5
5
6 Enable obsolete
6 Enable obsolete
7
7
8 $ cat >> $HGRCPATH << EOF
8 $ cat >> $HGRCPATH << EOF
9 > [ui]
9 > [ui]
10 > logtemplate = {rev}:{node|short} {desc}{if(obsfate, " [{join(obsfate, "; ")}]")}\n
10 > logtemplate = {rev}:{node|short} {desc}{if(obsfate, " [{join(obsfate, "; ")}]")}\n
11 > [experimental]
11 > [experimental]
12 > evolution.createmarkers=True
12 > evolution.createmarkers=True
13 > [extensions]
13 > [extensions]
14 > drawdag=$TESTDIR/drawdag.py
14 > drawdag=$TESTDIR/drawdag.py
15 > [alias]
15 > [alias]
16 > debugobsolete = debugobsolete -d '0 0'
16 > debugobsolete = debugobsolete -d '0 0'
17 > [phases]
17 > [phases]
18 > publish=False
18 > publish=False
19 > [templates]
19 > [templates]
20 > wuentryshort = '{instability}:{if(divergentnodes, " ")}{divergentnodes} {reason} {node|shortest}\n'
20 > wuentryshort = '{instability}:{if(divergentnodes, " ")}{divergentnodes} {reason} {node|shortest}\n'
21 > whyunstableshort = '{whyunstable % wuentryshort}'
21 > whyunstableshort = '{whyunstable % wuentryshort}'
22 > wuentryshorter = '{instability}:{divergentnodes % " {node|shortest} ({phase})"} {reason} {node|shortest}\n'
22 > wuentryshorter = '{instability}:{divergentnodes % " {node|shortest} ({phase})"} {reason} {node|shortest}\n'
23 > whyunstableshorter = '{whyunstable % wuentryshorter}'
23 > whyunstableshorter = '{whyunstable % wuentryshorter}'
24 > EOF
24 > EOF
25
25
26
26
27 $ mkcommit() {
27 $ mkcommit() {
28 > echo "$1" > "$1"
28 > echo "$1" > "$1"
29 > hg add "$1"
29 > hg add "$1"
30 > hg ci -m "$1"
30 > hg ci -m "$1"
31 > }
31 > }
32 $ getid() {
32 $ getid() {
33 > hg log --hidden -r "desc('$1')" -T '{node}\n'
33 > hg log --hidden -r "desc('$1')" -T '{node}\n'
34 > }
34 > }
35
35
36 setup repo
36 setup repo
37
37
38 $ hg init reference
38 $ hg init reference
39 $ cd reference
39 $ cd reference
40 $ mkcommit base
40 $ mkcommit base
41 $ mkcommit A_0
41 $ mkcommit A_0
42 $ hg up 0
42 $ hg up 0
43 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
43 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
44 $ mkcommit A_1
44 $ mkcommit A_1
45 created new head
45 created new head
46 $ hg up 0
46 $ hg up 0
47 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
47 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
48 $ mkcommit A_2
48 $ mkcommit A_2
49 created new head
49 created new head
50 $ hg up 0
50 $ hg up 0
51 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
51 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
52 $ cd ..
52 $ cd ..
53
53
54
54
55 $ newcase() {
55 $ newcase() {
56 > hg clone -u 0 -q reference $1
56 > hg clone -u 0 -q reference $1
57 > cd $1
57 > cd $1
58 > }
58 > }
59
59
60 direct divergence
60 direct divergence
61 -----------------
61 -----------------
62
62
63 A_1 have two direct and divergent successors A_1 and A_1
63 A_1 have two direct and divergent successors A_1 and A_1
64
64
65 $ newcase direct
65 $ newcase direct
66 $ hg debugobsolete `getid A_0` `getid A_1`
66 $ hg debugobsolete `getid A_0` `getid A_1`
67 1 new obsolescence markers
67 1 new obsolescence markers
68 obsoleted 1 changesets
68 obsoleted 1 changesets
69 $ hg debugobsolete `getid A_0` `getid A_2`
69 $ hg debugobsolete `getid A_0` `getid A_2`
70 1 new obsolescence markers
70 1 new obsolescence markers
71 2 new content-divergent changesets
71 2 new content-divergent changesets
72 $ hg log -G --hidden
72 $ hg log -G --hidden
73 * 3:392fd25390da A_2
73 * 3:392fd25390da A_2
74 |
74 |
75 | * 2:82623d38b9ba A_1
75 | * 2:82623d38b9ba A_1
76 |/
76 |/
77 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
77 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
78 |/
78 |/
79 @ 0:d20a80d4def3 base
79 @ 0:d20a80d4def3 base
80
80
81 $ hg debugsuccessorssets --hidden 'all()'
81 $ hg debugsuccessorssets --hidden 'all()'
82 d20a80d4def3
82 d20a80d4def3
83 d20a80d4def3
83 d20a80d4def3
84 007dc284c1f8
84 007dc284c1f8
85 82623d38b9ba
85 82623d38b9ba
86 392fd25390da
86 392fd25390da
87 82623d38b9ba
87 82623d38b9ba
88 82623d38b9ba
88 82623d38b9ba
89 392fd25390da
89 392fd25390da
90 392fd25390da
90 392fd25390da
91 $ hg log -r 'contentdivergent()'
91 $ hg log -r 'contentdivergent()'
92 2:82623d38b9ba A_1
92 2:82623d38b9ba A_1
93 3:392fd25390da A_2
93 3:392fd25390da A_2
94 $ hg log -r 'unstable()'
94 $ hg log -r 'unstable()'
95 2:82623d38b9ba A_1
95 2:82623d38b9ba A_1
96 3:392fd25390da A_2
96 3:392fd25390da A_2
97 $ hg debugsuccessorssets 'all()' --closest
97 $ hg debugsuccessorssets 'all()' --closest
98 d20a80d4def3
98 d20a80d4def3
99 d20a80d4def3
99 d20a80d4def3
100 82623d38b9ba
100 82623d38b9ba
101 82623d38b9ba
101 82623d38b9ba
102 392fd25390da
102 392fd25390da
103 392fd25390da
103 392fd25390da
104 $ hg debugsuccessorssets 'all()' --closest --hidden
104 $ hg debugsuccessorssets 'all()' --closest --hidden
105 d20a80d4def3
105 d20a80d4def3
106 d20a80d4def3
106 d20a80d4def3
107 007dc284c1f8
107 007dc284c1f8
108 82623d38b9ba
108 82623d38b9ba
109 392fd25390da
109 392fd25390da
110 82623d38b9ba
110 82623d38b9ba
111 82623d38b9ba
111 82623d38b9ba
112 392fd25390da
112 392fd25390da
113 392fd25390da
113 392fd25390da
114
114
115 check that mercurial refuse to push
115 check that mercurial refuse to push
116
116
117 $ hg init ../other
117 $ hg init ../other
118 $ hg push ../other
118 $ hg push ../other
119 pushing to ../other
119 pushing to ../other
120 searching for changes
120 searching for changes
121 abort: push includes content-divergent changeset: 392fd25390da!
121 abort: push includes unstable changesets:
122 82623d38b9ba (content-divergent)
123 392fd25390da (content-divergent)
122 [255]
124 [255]
123
125
124 $ cd ..
126 $ cd ..
125
127
126
128
127 indirect divergence with known changeset
129 indirect divergence with known changeset
128 -------------------------------------------
130 -------------------------------------------
129
131
130 $ newcase indirect_known
132 $ newcase indirect_known
131 $ hg debugobsolete `getid A_0` `getid A_1`
133 $ hg debugobsolete `getid A_0` `getid A_1`
132 1 new obsolescence markers
134 1 new obsolescence markers
133 obsoleted 1 changesets
135 obsoleted 1 changesets
134 $ hg debugobsolete `getid A_0` `getid A_2`
136 $ hg debugobsolete `getid A_0` `getid A_2`
135 1 new obsolescence markers
137 1 new obsolescence markers
136 2 new content-divergent changesets
138 2 new content-divergent changesets
137 $ mkcommit A_3
139 $ mkcommit A_3
138 created new head
140 created new head
139 $ hg debugobsolete `getid A_2` `getid A_3`
141 $ hg debugobsolete `getid A_2` `getid A_3`
140 1 new obsolescence markers
142 1 new obsolescence markers
141 obsoleted 1 changesets
143 obsoleted 1 changesets
142 $ hg log -G --hidden
144 $ hg log -G --hidden
143 @ 4:01f36c5a8fda A_3
145 @ 4:01f36c5a8fda A_3
144 |
146 |
145 | x 3:392fd25390da A_2 [rewritten as 4:01f36c5a8fda]
147 | x 3:392fd25390da A_2 [rewritten as 4:01f36c5a8fda]
146 |/
148 |/
147 | * 2:82623d38b9ba A_1
149 | * 2:82623d38b9ba A_1
148 |/
150 |/
149 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
151 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
150 |/
152 |/
151 o 0:d20a80d4def3 base
153 o 0:d20a80d4def3 base
152
154
153 $ hg debugsuccessorssets --hidden 'all()'
155 $ hg debugsuccessorssets --hidden 'all()'
154 d20a80d4def3
156 d20a80d4def3
155 d20a80d4def3
157 d20a80d4def3
156 007dc284c1f8
158 007dc284c1f8
157 82623d38b9ba
159 82623d38b9ba
158 01f36c5a8fda
160 01f36c5a8fda
159 82623d38b9ba
161 82623d38b9ba
160 82623d38b9ba
162 82623d38b9ba
161 392fd25390da
163 392fd25390da
162 01f36c5a8fda
164 01f36c5a8fda
163 01f36c5a8fda
165 01f36c5a8fda
164 01f36c5a8fda
166 01f36c5a8fda
165 $ hg log -r 'contentdivergent()'
167 $ hg log -r 'contentdivergent()'
166 2:82623d38b9ba A_1
168 2:82623d38b9ba A_1
167 4:01f36c5a8fda A_3
169 4:01f36c5a8fda A_3
168 $ hg debugsuccessorssets 'all()' --closest
170 $ hg debugsuccessorssets 'all()' --closest
169 d20a80d4def3
171 d20a80d4def3
170 d20a80d4def3
172 d20a80d4def3
171 82623d38b9ba
173 82623d38b9ba
172 82623d38b9ba
174 82623d38b9ba
173 01f36c5a8fda
175 01f36c5a8fda
174 01f36c5a8fda
176 01f36c5a8fda
175 $ hg debugsuccessorssets 'all()' --closest --hidden
177 $ hg debugsuccessorssets 'all()' --closest --hidden
176 d20a80d4def3
178 d20a80d4def3
177 d20a80d4def3
179 d20a80d4def3
178 007dc284c1f8
180 007dc284c1f8
179 82623d38b9ba
181 82623d38b9ba
180 392fd25390da
182 392fd25390da
181 82623d38b9ba
183 82623d38b9ba
182 82623d38b9ba
184 82623d38b9ba
183 392fd25390da
185 392fd25390da
184 392fd25390da
186 392fd25390da
185 01f36c5a8fda
187 01f36c5a8fda
186 01f36c5a8fda
188 01f36c5a8fda
187 $ cd ..
189 $ cd ..
188
190
189
191
190 indirect divergence with known changeset
192 indirect divergence with known changeset
191 -------------------------------------------
193 -------------------------------------------
192
194
193 $ newcase indirect_unknown
195 $ newcase indirect_unknown
194 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
196 $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
195 1 new obsolescence markers
197 1 new obsolescence markers
196 obsoleted 1 changesets
198 obsoleted 1 changesets
197 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
199 $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1`
198 1 new obsolescence markers
200 1 new obsolescence markers
199 $ hg debugobsolete `getid A_0` `getid A_2`
201 $ hg debugobsolete `getid A_0` `getid A_2`
200 1 new obsolescence markers
202 1 new obsolescence markers
201 2 new content-divergent changesets
203 2 new content-divergent changesets
202 $ hg log -G --hidden
204 $ hg log -G --hidden
203 * 3:392fd25390da A_2
205 * 3:392fd25390da A_2
204 |
206 |
205 | * 2:82623d38b9ba A_1
207 | * 2:82623d38b9ba A_1
206 |/
208 |/
207 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
209 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
208 |/
210 |/
209 @ 0:d20a80d4def3 base
211 @ 0:d20a80d4def3 base
210
212
211 $ hg debugsuccessorssets --hidden 'all()'
213 $ hg debugsuccessorssets --hidden 'all()'
212 d20a80d4def3
214 d20a80d4def3
213 d20a80d4def3
215 d20a80d4def3
214 007dc284c1f8
216 007dc284c1f8
215 82623d38b9ba
217 82623d38b9ba
216 392fd25390da
218 392fd25390da
217 82623d38b9ba
219 82623d38b9ba
218 82623d38b9ba
220 82623d38b9ba
219 392fd25390da
221 392fd25390da
220 392fd25390da
222 392fd25390da
221 $ hg log -r 'contentdivergent()'
223 $ hg log -r 'contentdivergent()'
222 2:82623d38b9ba A_1
224 2:82623d38b9ba A_1
223 3:392fd25390da A_2
225 3:392fd25390da A_2
224 $ hg debugsuccessorssets 'all()' --closest
226 $ hg debugsuccessorssets 'all()' --closest
225 d20a80d4def3
227 d20a80d4def3
226 d20a80d4def3
228 d20a80d4def3
227 82623d38b9ba
229 82623d38b9ba
228 82623d38b9ba
230 82623d38b9ba
229 392fd25390da
231 392fd25390da
230 392fd25390da
232 392fd25390da
231 $ hg debugsuccessorssets 'all()' --closest --hidden
233 $ hg debugsuccessorssets 'all()' --closest --hidden
232 d20a80d4def3
234 d20a80d4def3
233 d20a80d4def3
235 d20a80d4def3
234 007dc284c1f8
236 007dc284c1f8
235 82623d38b9ba
237 82623d38b9ba
236 392fd25390da
238 392fd25390da
237 82623d38b9ba
239 82623d38b9ba
238 82623d38b9ba
240 82623d38b9ba
239 392fd25390da
241 392fd25390da
240 392fd25390da
242 392fd25390da
241 $ cd ..
243 $ cd ..
242
244
243 do not take unknown node in account if they are final
245 do not take unknown node in account if they are final
244 -----------------------------------------------------
246 -----------------------------------------------------
245
247
246 $ newcase final-unknown
248 $ newcase final-unknown
247 $ hg debugobsolete `getid A_0` `getid A_1`
249 $ hg debugobsolete `getid A_0` `getid A_1`
248 1 new obsolescence markers
250 1 new obsolescence markers
249 obsoleted 1 changesets
251 obsoleted 1 changesets
250 $ hg debugobsolete `getid A_1` `getid A_2`
252 $ hg debugobsolete `getid A_1` `getid A_2`
251 1 new obsolescence markers
253 1 new obsolescence markers
252 obsoleted 1 changesets
254 obsoleted 1 changesets
253 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
255 $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
254 1 new obsolescence markers
256 1 new obsolescence markers
255 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
257 $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc
256 1 new obsolescence markers
258 1 new obsolescence markers
257 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
259 $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd
258 1 new obsolescence markers
260 1 new obsolescence markers
259
261
260 $ hg debugsuccessorssets --hidden 'desc('A_0')'
262 $ hg debugsuccessorssets --hidden 'desc('A_0')'
261 007dc284c1f8
263 007dc284c1f8
262 392fd25390da
264 392fd25390da
263 $ hg debugsuccessorssets 'desc('A_0')' --closest
265 $ hg debugsuccessorssets 'desc('A_0')' --closest
264 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
266 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
265 007dc284c1f8
267 007dc284c1f8
266 82623d38b9ba
268 82623d38b9ba
267
269
268 $ cd ..
270 $ cd ..
269
271
270 divergence that converge again is not divergence anymore
272 divergence that converge again is not divergence anymore
271 -----------------------------------------------------
273 -----------------------------------------------------
272
274
273 $ newcase converged_divergence
275 $ newcase converged_divergence
274 $ hg debugobsolete `getid A_0` `getid A_1`
276 $ hg debugobsolete `getid A_0` `getid A_1`
275 1 new obsolescence markers
277 1 new obsolescence markers
276 obsoleted 1 changesets
278 obsoleted 1 changesets
277 $ hg debugobsolete `getid A_0` `getid A_2`
279 $ hg debugobsolete `getid A_0` `getid A_2`
278 1 new obsolescence markers
280 1 new obsolescence markers
279 2 new content-divergent changesets
281 2 new content-divergent changesets
280 $ mkcommit A_3
282 $ mkcommit A_3
281 created new head
283 created new head
282 $ hg debugobsolete `getid A_1` `getid A_3`
284 $ hg debugobsolete `getid A_1` `getid A_3`
283 1 new obsolescence markers
285 1 new obsolescence markers
284 obsoleted 1 changesets
286 obsoleted 1 changesets
285 $ hg debugobsolete `getid A_2` `getid A_3`
287 $ hg debugobsolete `getid A_2` `getid A_3`
286 1 new obsolescence markers
288 1 new obsolescence markers
287 obsoleted 1 changesets
289 obsoleted 1 changesets
288 $ hg log -G --hidden
290 $ hg log -G --hidden
289 @ 4:01f36c5a8fda A_3
291 @ 4:01f36c5a8fda A_3
290 |
292 |
291 | x 3:392fd25390da A_2 [rewritten as 4:01f36c5a8fda]
293 | x 3:392fd25390da A_2 [rewritten as 4:01f36c5a8fda]
292 |/
294 |/
293 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
295 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
294 |/
296 |/
295 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
297 | x 1:007dc284c1f8 A_0 [rewritten as 2:82623d38b9ba; rewritten as 3:392fd25390da]
296 |/
298 |/
297 o 0:d20a80d4def3 base
299 o 0:d20a80d4def3 base
298
300
299 $ hg debugsuccessorssets --hidden 'all()'
301 $ hg debugsuccessorssets --hidden 'all()'
300 d20a80d4def3
302 d20a80d4def3
301 d20a80d4def3
303 d20a80d4def3
302 007dc284c1f8
304 007dc284c1f8
303 01f36c5a8fda
305 01f36c5a8fda
304 82623d38b9ba
306 82623d38b9ba
305 01f36c5a8fda
307 01f36c5a8fda
306 392fd25390da
308 392fd25390da
307 01f36c5a8fda
309 01f36c5a8fda
308 01f36c5a8fda
310 01f36c5a8fda
309 01f36c5a8fda
311 01f36c5a8fda
310 $ hg log -r 'contentdivergent()'
312 $ hg log -r 'contentdivergent()'
311 $ hg debugsuccessorssets 'all()' --closest
313 $ hg debugsuccessorssets 'all()' --closest
312 d20a80d4def3
314 d20a80d4def3
313 d20a80d4def3
315 d20a80d4def3
314 01f36c5a8fda
316 01f36c5a8fda
315 01f36c5a8fda
317 01f36c5a8fda
316 $ hg debugsuccessorssets 'all()' --closest --hidden
318 $ hg debugsuccessorssets 'all()' --closest --hidden
317 d20a80d4def3
319 d20a80d4def3
318 d20a80d4def3
320 d20a80d4def3
319 007dc284c1f8
321 007dc284c1f8
320 82623d38b9ba
322 82623d38b9ba
321 392fd25390da
323 392fd25390da
322 82623d38b9ba
324 82623d38b9ba
323 82623d38b9ba
325 82623d38b9ba
324 392fd25390da
326 392fd25390da
325 392fd25390da
327 392fd25390da
326 01f36c5a8fda
328 01f36c5a8fda
327 01f36c5a8fda
329 01f36c5a8fda
328 $ cd ..
330 $ cd ..
329
331
330 split is not divergences
332 split is not divergences
331 -----------------------------
333 -----------------------------
332
334
333 $ newcase split
335 $ newcase split
334 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
336 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
335 1 new obsolescence markers
337 1 new obsolescence markers
336 obsoleted 1 changesets
338 obsoleted 1 changesets
337 $ hg log -G --hidden
339 $ hg log -G --hidden
338 o 3:392fd25390da A_2
340 o 3:392fd25390da A_2
339 |
341 |
340 | o 2:82623d38b9ba A_1
342 | o 2:82623d38b9ba A_1
341 |/
343 |/
342 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
344 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
343 |/
345 |/
344 @ 0:d20a80d4def3 base
346 @ 0:d20a80d4def3 base
345
347
346 $ hg debugsuccessorssets --hidden 'all()'
348 $ hg debugsuccessorssets --hidden 'all()'
347 d20a80d4def3
349 d20a80d4def3
348 d20a80d4def3
350 d20a80d4def3
349 007dc284c1f8
351 007dc284c1f8
350 82623d38b9ba 392fd25390da
352 82623d38b9ba 392fd25390da
351 82623d38b9ba
353 82623d38b9ba
352 82623d38b9ba
354 82623d38b9ba
353 392fd25390da
355 392fd25390da
354 392fd25390da
356 392fd25390da
355 $ hg log -r 'contentdivergent()'
357 $ hg log -r 'contentdivergent()'
356 $ hg debugsuccessorssets 'all()' --closest
358 $ hg debugsuccessorssets 'all()' --closest
357 d20a80d4def3
359 d20a80d4def3
358 d20a80d4def3
360 d20a80d4def3
359 82623d38b9ba
361 82623d38b9ba
360 82623d38b9ba
362 82623d38b9ba
361 392fd25390da
363 392fd25390da
362 392fd25390da
364 392fd25390da
363 $ hg debugsuccessorssets 'all()' --closest --hidden
365 $ hg debugsuccessorssets 'all()' --closest --hidden
364 d20a80d4def3
366 d20a80d4def3
365 d20a80d4def3
367 d20a80d4def3
366 007dc284c1f8
368 007dc284c1f8
367 82623d38b9ba 392fd25390da
369 82623d38b9ba 392fd25390da
368 82623d38b9ba
370 82623d38b9ba
369 82623d38b9ba
371 82623d38b9ba
370 392fd25390da
372 392fd25390da
371 392fd25390da
373 392fd25390da
372
374
373 Even when subsequent rewriting happen
375 Even when subsequent rewriting happen
374
376
375 $ mkcommit A_3
377 $ mkcommit A_3
376 created new head
378 created new head
377 $ hg debugobsolete `getid A_1` `getid A_3`
379 $ hg debugobsolete `getid A_1` `getid A_3`
378 1 new obsolescence markers
380 1 new obsolescence markers
379 obsoleted 1 changesets
381 obsoleted 1 changesets
380 $ hg up 0
382 $ hg up 0
381 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
383 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
382 $ mkcommit A_4
384 $ mkcommit A_4
383 created new head
385 created new head
384 $ hg debugobsolete `getid A_2` `getid A_4`
386 $ hg debugobsolete `getid A_2` `getid A_4`
385 1 new obsolescence markers
387 1 new obsolescence markers
386 obsoleted 1 changesets
388 obsoleted 1 changesets
387 $ hg up 0
389 $ hg up 0
388 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
390 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
389 $ mkcommit A_5
391 $ mkcommit A_5
390 created new head
392 created new head
391 $ hg debugobsolete `getid A_4` `getid A_5`
393 $ hg debugobsolete `getid A_4` `getid A_5`
392 1 new obsolescence markers
394 1 new obsolescence markers
393 obsoleted 1 changesets
395 obsoleted 1 changesets
394 $ hg log -G --hidden
396 $ hg log -G --hidden
395 @ 6:e442cfc57690 A_5
397 @ 6:e442cfc57690 A_5
396 |
398 |
397 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
399 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
398 |/
400 |/
399 | o 4:01f36c5a8fda A_3
401 | o 4:01f36c5a8fda A_3
400 |/
402 |/
401 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
403 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
402 |/
404 |/
403 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
405 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
404 |/
406 |/
405 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
407 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
406 |/
408 |/
407 o 0:d20a80d4def3 base
409 o 0:d20a80d4def3 base
408
410
409 $ hg debugsuccessorssets --hidden 'all()'
411 $ hg debugsuccessorssets --hidden 'all()'
410 d20a80d4def3
412 d20a80d4def3
411 d20a80d4def3
413 d20a80d4def3
412 007dc284c1f8
414 007dc284c1f8
413 01f36c5a8fda e442cfc57690
415 01f36c5a8fda e442cfc57690
414 82623d38b9ba
416 82623d38b9ba
415 01f36c5a8fda
417 01f36c5a8fda
416 392fd25390da
418 392fd25390da
417 e442cfc57690
419 e442cfc57690
418 01f36c5a8fda
420 01f36c5a8fda
419 01f36c5a8fda
421 01f36c5a8fda
420 6a411f0d7a0a
422 6a411f0d7a0a
421 e442cfc57690
423 e442cfc57690
422 e442cfc57690
424 e442cfc57690
423 e442cfc57690
425 e442cfc57690
424 $ hg debugsuccessorssets 'all()' --closest
426 $ hg debugsuccessorssets 'all()' --closest
425 d20a80d4def3
427 d20a80d4def3
426 d20a80d4def3
428 d20a80d4def3
427 01f36c5a8fda
429 01f36c5a8fda
428 01f36c5a8fda
430 01f36c5a8fda
429 e442cfc57690
431 e442cfc57690
430 e442cfc57690
432 e442cfc57690
431 $ hg debugsuccessorssets 'all()' --closest --hidden
433 $ hg debugsuccessorssets 'all()' --closest --hidden
432 d20a80d4def3
434 d20a80d4def3
433 d20a80d4def3
435 d20a80d4def3
434 007dc284c1f8
436 007dc284c1f8
435 82623d38b9ba 392fd25390da
437 82623d38b9ba 392fd25390da
436 82623d38b9ba
438 82623d38b9ba
437 82623d38b9ba
439 82623d38b9ba
438 392fd25390da
440 392fd25390da
439 392fd25390da
441 392fd25390da
440 01f36c5a8fda
442 01f36c5a8fda
441 01f36c5a8fda
443 01f36c5a8fda
442 6a411f0d7a0a
444 6a411f0d7a0a
443 e442cfc57690
445 e442cfc57690
444 e442cfc57690
446 e442cfc57690
445 e442cfc57690
447 e442cfc57690
446 $ hg log -r 'contentdivergent()'
448 $ hg log -r 'contentdivergent()'
447
449
448 Check more complex obsolescence graft (with divergence)
450 Check more complex obsolescence graft (with divergence)
449
451
450 $ mkcommit B_0; hg up 0
452 $ mkcommit B_0; hg up 0
451 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
453 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
452 $ hg debugobsolete `getid B_0` `getid A_2`
454 $ hg debugobsolete `getid B_0` `getid A_2`
453 1 new obsolescence markers
455 1 new obsolescence markers
454 obsoleted 1 changesets
456 obsoleted 1 changesets
455 $ mkcommit A_7; hg up 0
457 $ mkcommit A_7; hg up 0
456 created new head
458 created new head
457 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
459 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
458 $ mkcommit A_8; hg up 0
460 $ mkcommit A_8; hg up 0
459 created new head
461 created new head
460 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
462 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
461 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
463 $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8`
462 1 new obsolescence markers
464 1 new obsolescence markers
463 obsoleted 1 changesets
465 obsoleted 1 changesets
464 $ mkcommit A_9; hg up 0
466 $ mkcommit A_9; hg up 0
465 created new head
467 created new head
466 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
468 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
467 $ hg debugobsolete `getid A_5` `getid A_9`
469 $ hg debugobsolete `getid A_5` `getid A_9`
468 1 new obsolescence markers
470 1 new obsolescence markers
469 4 new content-divergent changesets
471 4 new content-divergent changesets
470 $ hg log -G --hidden
472 $ hg log -G --hidden
471 * 10:bed64f5d2f5a A_9
473 * 10:bed64f5d2f5a A_9
472 |
474 |
473 | * 9:14608b260df8 A_8
475 | * 9:14608b260df8 A_8
474 |/
476 |/
475 | * 8:7ae126973a96 A_7
477 | * 8:7ae126973a96 A_7
476 |/
478 |/
477 | x 7:3750ebee865d B_0 [rewritten as 3:392fd25390da]
479 | x 7:3750ebee865d B_0 [rewritten as 3:392fd25390da]
478 | |
480 | |
479 | x 6:e442cfc57690 A_5 [rewritten as 10:bed64f5d2f5a; split as 8:7ae126973a96, 9:14608b260df8]
481 | x 6:e442cfc57690 A_5 [rewritten as 10:bed64f5d2f5a; split as 8:7ae126973a96, 9:14608b260df8]
480 |/
482 |/
481 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
483 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
482 |/
484 |/
483 | * 4:01f36c5a8fda A_3
485 | * 4:01f36c5a8fda A_3
484 |/
486 |/
485 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
487 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
486 |/
488 |/
487 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
489 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
488 |/
490 |/
489 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
491 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
490 |/
492 |/
491 @ 0:d20a80d4def3 base
493 @ 0:d20a80d4def3 base
492
494
493 $ hg debugsuccessorssets --hidden 'all()'
495 $ hg debugsuccessorssets --hidden 'all()'
494 d20a80d4def3
496 d20a80d4def3
495 d20a80d4def3
497 d20a80d4def3
496 007dc284c1f8
498 007dc284c1f8
497 01f36c5a8fda bed64f5d2f5a
499 01f36c5a8fda bed64f5d2f5a
498 01f36c5a8fda 7ae126973a96 14608b260df8
500 01f36c5a8fda 7ae126973a96 14608b260df8
499 82623d38b9ba
501 82623d38b9ba
500 01f36c5a8fda
502 01f36c5a8fda
501 392fd25390da
503 392fd25390da
502 bed64f5d2f5a
504 bed64f5d2f5a
503 7ae126973a96 14608b260df8
505 7ae126973a96 14608b260df8
504 01f36c5a8fda
506 01f36c5a8fda
505 01f36c5a8fda
507 01f36c5a8fda
506 6a411f0d7a0a
508 6a411f0d7a0a
507 bed64f5d2f5a
509 bed64f5d2f5a
508 7ae126973a96 14608b260df8
510 7ae126973a96 14608b260df8
509 e442cfc57690
511 e442cfc57690
510 bed64f5d2f5a
512 bed64f5d2f5a
511 7ae126973a96 14608b260df8
513 7ae126973a96 14608b260df8
512 3750ebee865d
514 3750ebee865d
513 bed64f5d2f5a
515 bed64f5d2f5a
514 7ae126973a96 14608b260df8
516 7ae126973a96 14608b260df8
515 7ae126973a96
517 7ae126973a96
516 7ae126973a96
518 7ae126973a96
517 14608b260df8
519 14608b260df8
518 14608b260df8
520 14608b260df8
519 bed64f5d2f5a
521 bed64f5d2f5a
520 bed64f5d2f5a
522 bed64f5d2f5a
521 $ hg debugsuccessorssets 'all()' --closest
523 $ hg debugsuccessorssets 'all()' --closest
522 d20a80d4def3
524 d20a80d4def3
523 d20a80d4def3
525 d20a80d4def3
524 01f36c5a8fda
526 01f36c5a8fda
525 01f36c5a8fda
527 01f36c5a8fda
526 7ae126973a96
528 7ae126973a96
527 7ae126973a96
529 7ae126973a96
528 14608b260df8
530 14608b260df8
529 14608b260df8
531 14608b260df8
530 bed64f5d2f5a
532 bed64f5d2f5a
531 bed64f5d2f5a
533 bed64f5d2f5a
532 $ hg debugsuccessorssets 'all()' --closest --hidden
534 $ hg debugsuccessorssets 'all()' --closest --hidden
533 d20a80d4def3
535 d20a80d4def3
534 d20a80d4def3
536 d20a80d4def3
535 007dc284c1f8
537 007dc284c1f8
536 82623d38b9ba 392fd25390da
538 82623d38b9ba 392fd25390da
537 82623d38b9ba
539 82623d38b9ba
538 82623d38b9ba
540 82623d38b9ba
539 392fd25390da
541 392fd25390da
540 392fd25390da
542 392fd25390da
541 01f36c5a8fda
543 01f36c5a8fda
542 01f36c5a8fda
544 01f36c5a8fda
543 6a411f0d7a0a
545 6a411f0d7a0a
544 e442cfc57690
546 e442cfc57690
545 e442cfc57690
547 e442cfc57690
546 e442cfc57690
548 e442cfc57690
547 3750ebee865d
549 3750ebee865d
548 392fd25390da
550 392fd25390da
549 7ae126973a96
551 7ae126973a96
550 7ae126973a96
552 7ae126973a96
551 14608b260df8
553 14608b260df8
552 14608b260df8
554 14608b260df8
553 bed64f5d2f5a
555 bed64f5d2f5a
554 bed64f5d2f5a
556 bed64f5d2f5a
555 $ hg log -r 'contentdivergent()'
557 $ hg log -r 'contentdivergent()'
556 4:01f36c5a8fda A_3
558 4:01f36c5a8fda A_3
557 8:7ae126973a96 A_7
559 8:7ae126973a96 A_7
558 9:14608b260df8 A_8
560 9:14608b260df8 A_8
559 10:bed64f5d2f5a A_9
561 10:bed64f5d2f5a A_9
560
562
561 $ hg log -r bed64f5d2f5a -T '{whyunstable}\n' | sort
563 $ hg log -r bed64f5d2f5a -T '{whyunstable}\n' | sort
562 content-divergent: 4:01f36c5a8fda (draft) 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor 007dc284c1f8
564 content-divergent: 4:01f36c5a8fda (draft) 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor 007dc284c1f8
563 content-divergent: 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor e442cfc57690
565 content-divergent: 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor e442cfc57690
564 $ hg log -r bed64f5d2f5a -T whyunstableshort | sort
566 $ hg log -r bed64f5d2f5a -T whyunstableshort | sort
565 content-divergent: 4:01f36c5a8fda (draft) 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor 007d
567 content-divergent: 4:01f36c5a8fda (draft) 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor 007d
566 content-divergent: 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor e442
568 content-divergent: 8:7ae126973a96 (draft) 9:14608b260df8 (draft) predecessor e442
567 $ hg log -r bed64f5d2f5a -T whyunstableshorter | sort
569 $ hg log -r bed64f5d2f5a -T whyunstableshorter | sort
568 content-divergent: 01f3 (draft) 7ae1 (draft) 1460 (draft) predecessor 007d
570 content-divergent: 01f3 (draft) 7ae1 (draft) 1460 (draft) predecessor 007d
569 content-divergent: 7ae1 (draft) 1460 (draft) predecessor e442
571 content-divergent: 7ae1 (draft) 1460 (draft) predecessor e442
570
572
571 fix the divergence
573 fix the divergence
572
574
573 $ mkcommit A_A; hg up 0
575 $ mkcommit A_A; hg up 0
574 created new head
576 created new head
575 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
577 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
576 $ hg debugobsolete `getid A_9` `getid A_A`
578 $ hg debugobsolete `getid A_9` `getid A_A`
577 1 new obsolescence markers
579 1 new obsolescence markers
578 obsoleted 1 changesets
580 obsoleted 1 changesets
579 $ hg debugobsolete `getid A_7` `getid A_A`
581 $ hg debugobsolete `getid A_7` `getid A_A`
580 1 new obsolescence markers
582 1 new obsolescence markers
581 obsoleted 1 changesets
583 obsoleted 1 changesets
582 $ hg debugobsolete `getid A_8` `getid A_A`
584 $ hg debugobsolete `getid A_8` `getid A_A`
583 1 new obsolescence markers
585 1 new obsolescence markers
584 obsoleted 1 changesets
586 obsoleted 1 changesets
585 $ hg log -G --hidden
587 $ hg log -G --hidden
586 o 11:a139f71be9da A_A
588 o 11:a139f71be9da A_A
587 |
589 |
588 | x 10:bed64f5d2f5a A_9 [rewritten as 11:a139f71be9da]
590 | x 10:bed64f5d2f5a A_9 [rewritten as 11:a139f71be9da]
589 |/
591 |/
590 | x 9:14608b260df8 A_8 [rewritten as 11:a139f71be9da]
592 | x 9:14608b260df8 A_8 [rewritten as 11:a139f71be9da]
591 |/
593 |/
592 | x 8:7ae126973a96 A_7 [rewritten as 11:a139f71be9da]
594 | x 8:7ae126973a96 A_7 [rewritten as 11:a139f71be9da]
593 |/
595 |/
594 | x 7:3750ebee865d B_0 [rewritten as 3:392fd25390da]
596 | x 7:3750ebee865d B_0 [rewritten as 3:392fd25390da]
595 | |
597 | |
596 | x 6:e442cfc57690 A_5 [rewritten as 10:bed64f5d2f5a; split as 8:7ae126973a96, 9:14608b260df8]
598 | x 6:e442cfc57690 A_5 [rewritten as 10:bed64f5d2f5a; split as 8:7ae126973a96, 9:14608b260df8]
597 |/
599 |/
598 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
600 | x 5:6a411f0d7a0a A_4 [rewritten as 6:e442cfc57690]
599 |/
601 |/
600 | o 4:01f36c5a8fda A_3
602 | o 4:01f36c5a8fda A_3
601 |/
603 |/
602 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
604 | x 3:392fd25390da A_2 [rewritten as 5:6a411f0d7a0a]
603 |/
605 |/
604 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
606 | x 2:82623d38b9ba A_1 [rewritten as 4:01f36c5a8fda]
605 |/
607 |/
606 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
608 | x 1:007dc284c1f8 A_0 [split as 2:82623d38b9ba, 3:392fd25390da]
607 |/
609 |/
608 @ 0:d20a80d4def3 base
610 @ 0:d20a80d4def3 base
609
611
610 $ hg debugsuccessorssets --hidden 'all()'
612 $ hg debugsuccessorssets --hidden 'all()'
611 d20a80d4def3
613 d20a80d4def3
612 d20a80d4def3
614 d20a80d4def3
613 007dc284c1f8
615 007dc284c1f8
614 01f36c5a8fda a139f71be9da
616 01f36c5a8fda a139f71be9da
615 82623d38b9ba
617 82623d38b9ba
616 01f36c5a8fda
618 01f36c5a8fda
617 392fd25390da
619 392fd25390da
618 a139f71be9da
620 a139f71be9da
619 01f36c5a8fda
621 01f36c5a8fda
620 01f36c5a8fda
622 01f36c5a8fda
621 6a411f0d7a0a
623 6a411f0d7a0a
622 a139f71be9da
624 a139f71be9da
623 e442cfc57690
625 e442cfc57690
624 a139f71be9da
626 a139f71be9da
625 3750ebee865d
627 3750ebee865d
626 a139f71be9da
628 a139f71be9da
627 7ae126973a96
629 7ae126973a96
628 a139f71be9da
630 a139f71be9da
629 14608b260df8
631 14608b260df8
630 a139f71be9da
632 a139f71be9da
631 bed64f5d2f5a
633 bed64f5d2f5a
632 a139f71be9da
634 a139f71be9da
633 a139f71be9da
635 a139f71be9da
634 a139f71be9da
636 a139f71be9da
635 $ hg debugsuccessorssets 'all()' --closest
637 $ hg debugsuccessorssets 'all()' --closest
636 d20a80d4def3
638 d20a80d4def3
637 d20a80d4def3
639 d20a80d4def3
638 01f36c5a8fda
640 01f36c5a8fda
639 01f36c5a8fda
641 01f36c5a8fda
640 a139f71be9da
642 a139f71be9da
641 a139f71be9da
643 a139f71be9da
642 $ hg debugsuccessorssets 'all()' --closest --hidden
644 $ hg debugsuccessorssets 'all()' --closest --hidden
643 d20a80d4def3
645 d20a80d4def3
644 d20a80d4def3
646 d20a80d4def3
645 007dc284c1f8
647 007dc284c1f8
646 82623d38b9ba 392fd25390da
648 82623d38b9ba 392fd25390da
647 82623d38b9ba
649 82623d38b9ba
648 82623d38b9ba
650 82623d38b9ba
649 392fd25390da
651 392fd25390da
650 392fd25390da
652 392fd25390da
651 01f36c5a8fda
653 01f36c5a8fda
652 01f36c5a8fda
654 01f36c5a8fda
653 6a411f0d7a0a
655 6a411f0d7a0a
654 e442cfc57690
656 e442cfc57690
655 e442cfc57690
657 e442cfc57690
656 e442cfc57690
658 e442cfc57690
657 3750ebee865d
659 3750ebee865d
658 392fd25390da
660 392fd25390da
659 7ae126973a96
661 7ae126973a96
660 a139f71be9da
662 a139f71be9da
661 14608b260df8
663 14608b260df8
662 a139f71be9da
664 a139f71be9da
663 bed64f5d2f5a
665 bed64f5d2f5a
664 a139f71be9da
666 a139f71be9da
665 a139f71be9da
667 a139f71be9da
666 a139f71be9da
668 a139f71be9da
667 $ hg log -r 'contentdivergent()'
669 $ hg log -r 'contentdivergent()'
668
670
669 #if serve
671 #if serve
670
672
671 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid --config web.view=all \
673 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid --config web.view=all \
672 > -A access.log -E errors.log
674 > -A access.log -E errors.log
673 $ cat hg.pid >> $DAEMON_PIDS
675 $ cat hg.pid >> $DAEMON_PIDS
674
676
675 check an obsolete changeset that was rewritten and also split
677 check an obsolete changeset that was rewritten and also split
676
678
677 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=paper' | egrep 'rewritten|split'
679 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=paper' | egrep 'rewritten|split'
678 <td>rewritten as <a href="/rev/bed64f5d2f5a?style=paper">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span><br>
680 <td>rewritten as <a href="/rev/bed64f5d2f5a?style=paper">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span><br>
679 split as <a href="/rev/7ae126973a96?style=paper">7ae126973a96</a> <a href="/rev/14608b260df8?style=paper">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
681 split as <a href="/rev/7ae126973a96?style=paper">7ae126973a96</a> <a href="/rev/14608b260df8?style=paper">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
680 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=coal' | egrep 'rewritten|split'
682 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=coal' | egrep 'rewritten|split'
681 <td>rewritten as <a href="/rev/bed64f5d2f5a?style=coal">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span><br>
683 <td>rewritten as <a href="/rev/bed64f5d2f5a?style=coal">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span><br>
682 split as <a href="/rev/7ae126973a96?style=coal">7ae126973a96</a> <a href="/rev/14608b260df8?style=coal">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
684 split as <a href="/rev/7ae126973a96?style=coal">7ae126973a96</a> <a href="/rev/14608b260df8?style=coal">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
683 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=gitweb' | egrep 'rewritten|split'
685 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=gitweb' | egrep 'rewritten|split'
684 <td>rewritten as <a class="list" href="/rev/bed64f5d2f5a?style=gitweb">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
686 <td>rewritten as <a class="list" href="/rev/bed64f5d2f5a?style=gitweb">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
685 <td>split as <a class="list" href="/rev/7ae126973a96?style=gitweb">7ae126973a96</a> <a class="list" href="/rev/14608b260df8?style=gitweb">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
687 <td>split as <a class="list" href="/rev/7ae126973a96?style=gitweb">7ae126973a96</a> <a class="list" href="/rev/14608b260df8?style=gitweb">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
686 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=monoblue' | egrep 'rewritten|split'
688 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=monoblue' | egrep 'rewritten|split'
687 <dd>rewritten as <a href="/rev/bed64f5d2f5a?style=monoblue">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></dd>
689 <dd>rewritten as <a href="/rev/bed64f5d2f5a?style=monoblue">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></dd>
688 <dd>split as <a href="/rev/7ae126973a96?style=monoblue">7ae126973a96</a> <a href="/rev/14608b260df8?style=monoblue">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></dd>
690 <dd>split as <a href="/rev/7ae126973a96?style=monoblue">7ae126973a96</a> <a href="/rev/14608b260df8?style=monoblue">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></dd>
689 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=spartan' | egrep 'rewritten|split'
691 $ get-with-headers.py localhost:$HGPORT 'rev/e442cfc57690?style=spartan' | egrep 'rewritten|split'
690 <td class="obsolete">rewritten as <a href="/rev/bed64f5d2f5a?style=spartan">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
692 <td class="obsolete">rewritten as <a href="/rev/bed64f5d2f5a?style=spartan">bed64f5d2f5a</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
691 <td class="obsolete">split as <a href="/rev/7ae126973a96?style=spartan">7ae126973a96</a> <a href="/rev/14608b260df8?style=spartan">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
693 <td class="obsolete">split as <a href="/rev/7ae126973a96?style=spartan">7ae126973a96</a> <a href="/rev/14608b260df8?style=spartan">14608b260df8</a> by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
692
694
693 $ killdaemons.py
695 $ killdaemons.py
694
696
695 #endif
697 #endif
696
698
697 $ cd ..
699 $ cd ..
698
700
699
701
700 Subset does not diverge
702 Subset does not diverge
701 ------------------------------
703 ------------------------------
702
704
703 Do not report divergent successors-set if it is a subset of another
705 Do not report divergent successors-set if it is a subset of another
704 successors-set. (report [A,B] not [A] + [A,B])
706 successors-set. (report [A,B] not [A] + [A,B])
705
707
706 $ newcase subset
708 $ newcase subset
707 $ hg debugobsolete `getid A_0` `getid A_2`
709 $ hg debugobsolete `getid A_0` `getid A_2`
708 1 new obsolescence markers
710 1 new obsolescence markers
709 obsoleted 1 changesets
711 obsoleted 1 changesets
710 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
712 $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2`
711 1 new obsolescence markers
713 1 new obsolescence markers
712 $ hg debugsuccessorssets --hidden 'desc('A_0')'
714 $ hg debugsuccessorssets --hidden 'desc('A_0')'
713 007dc284c1f8
715 007dc284c1f8
714 82623d38b9ba 392fd25390da
716 82623d38b9ba 392fd25390da
715 $ hg debugsuccessorssets 'desc('A_0')' --closest
717 $ hg debugsuccessorssets 'desc('A_0')' --closest
716 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
718 $ hg debugsuccessorssets 'desc('A_0')' --closest --hidden
717 007dc284c1f8
719 007dc284c1f8
718 82623d38b9ba 392fd25390da
720 82623d38b9ba 392fd25390da
719
721
720 $ cd ..
722 $ cd ..
721
723
722 Use scmutil.cleanupnodes API to create divergence
724 Use scmutil.cleanupnodes API to create divergence
723
725
724 $ hg init cleanupnodes
726 $ hg init cleanupnodes
725 $ cd cleanupnodes
727 $ cd cleanupnodes
726 $ hg debugdrawdag <<'EOS'
728 $ hg debugdrawdag <<'EOS'
727 > B1 B3 B4
729 > B1 B3 B4
728 > | \|
730 > | \|
729 > A Z
731 > A Z
730 > EOS
732 > EOS
731
733
732 $ hg update -q B1
734 $ hg update -q B1
733 $ echo 3 >> B
735 $ echo 3 >> B
734 $ hg commit --amend -m B2
736 $ hg commit --amend -m B2
735 $ cat > $TESTTMP/scmutilcleanup.py <<EOF
737 $ cat > $TESTTMP/scmutilcleanup.py <<EOF
736 > from mercurial import registrar, scmutil
738 > from mercurial import registrar, scmutil
737 > cmdtable = {}
739 > cmdtable = {}
738 > command = registrar.command(cmdtable)
740 > command = registrar.command(cmdtable)
739 > @command(b'cleanup')
741 > @command(b'cleanup')
740 > def cleanup(ui, repo):
742 > def cleanup(ui, repo):
741 > def node(expr):
743 > def node(expr):
742 > unfi = repo.unfiltered()
744 > unfi = repo.unfiltered()
743 > rev = unfi.revs(expr).first()
745 > rev = unfi.revs(expr).first()
744 > return unfi.changelog.node(rev)
746 > return unfi.changelog.node(rev)
745 > with repo.wlock(), repo.lock(), repo.transaction(b'delayedstrip'):
747 > with repo.wlock(), repo.lock(), repo.transaction(b'delayedstrip'):
746 > mapping = {node(b'desc(B1)'): [node(b'desc(B3)')],
748 > mapping = {node(b'desc(B1)'): [node(b'desc(B3)')],
747 > node(b'desc(B3)'): [node(b'desc(B4)')]}
749 > node(b'desc(B3)'): [node(b'desc(B4)')]}
748 > scmutil.cleanupnodes(repo, mapping, b'test')
750 > scmutil.cleanupnodes(repo, mapping, b'test')
749 > EOF
751 > EOF
750
752
751 $ rm .hg/localtags
753 $ rm .hg/localtags
752 $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
754 $ hg cleanup --config extensions.t=$TESTTMP/scmutilcleanup.py
753 2 new content-divergent changesets
755 2 new content-divergent changesets
754 $ hg log -G -T '{rev}:{node|short} {desc} {instabilities}' -r 'sort(all(), topo)'
756 $ hg log -G -T '{rev}:{node|short} {desc} {instabilities}' -r 'sort(all(), topo)'
755 @ 5:1a2a9b5b0030 B2 content-divergent
757 @ 5:1a2a9b5b0030 B2 content-divergent
756 |
758 |
757 | * 4:70d5a63ca112 B4 content-divergent
759 | * 4:70d5a63ca112 B4 content-divergent
758 | |
760 | |
759 | o 1:48b9aae0607f Z
761 | o 1:48b9aae0607f Z
760 |
762 |
761 o 0:426bada5c675 A
763 o 0:426bada5c675 A
762
764
763 $ hg debugobsolete
765 $ hg debugobsolete
764 a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
766 a178212c3433c4e77b573f6011e29affb8aefa33 1a2a9b5b0030632400aa78e00388c20f99d3ec44 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
765 a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'test', 'user': 'test'}
767 a178212c3433c4e77b573f6011e29affb8aefa33 ad6478fb94ecec98b86daae98722865d494ac561 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '13', 'operation': 'test', 'user': 'test'}
766 ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '9', 'operation': 'test', 'user': 'test'}
768 ad6478fb94ecec98b86daae98722865d494ac561 70d5a63ca112acb3764bc1d7320ca90ea688d671 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '9', 'operation': 'test', 'user': 'test'}
767
769
768 $ hg debugwhyunstable 1a2a9b5b0030
770 $ hg debugwhyunstable 1a2a9b5b0030
769 content-divergent: 70d5a63ca112acb3764bc1d7320ca90ea688d671 (draft) predecessor a178212c3433c4e77b573f6011e29affb8aefa33
771 content-divergent: 70d5a63ca112acb3764bc1d7320ca90ea688d671 (draft) predecessor a178212c3433c4e77b573f6011e29affb8aefa33
770
772
771 $ hg log -r 1a2a9b5b0030 -T '{whyunstable}\n'
773 $ hg log -r 1a2a9b5b0030 -T '{whyunstable}\n'
772 content-divergent: 4:70d5a63ca112 (draft) predecessor a178212c3433
774 content-divergent: 4:70d5a63ca112 (draft) predecessor a178212c3433
773 $ hg log -r 1a2a9b5b0030 -T whyunstableshort
775 $ hg log -r 1a2a9b5b0030 -T whyunstableshort
774 content-divergent: 4:70d5a63ca112 (draft) predecessor a178
776 content-divergent: 4:70d5a63ca112 (draft) predecessor a178
775 $ hg log -r 1a2a9b5b0030 -T whyunstableshorter
777 $ hg log -r 1a2a9b5b0030 -T whyunstableshorter
776 content-divergent: 70d5 (draft) predecessor a178
778 content-divergent: 70d5 (draft) predecessor a178
777
779
778 #if serve
780 #if serve
779
781
780 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
782 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
781 $ cat hg.pid >> $DAEMON_PIDS
783 $ cat hg.pid >> $DAEMON_PIDS
782
784
783 check explanation for a content-divergent changeset
785 check explanation for a content-divergent changeset
784
786
785 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=paper' | grep divergent:
787 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=paper' | grep divergent:
786 <td>content-divergent: <a href="/rev/70d5a63ca112?style=paper">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=paper">a178212c3433</a></td>
788 <td>content-divergent: <a href="/rev/70d5a63ca112?style=paper">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=paper">a178212c3433</a></td>
787 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=coal' | grep divergent:
789 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=coal' | grep divergent:
788 <td>content-divergent: <a href="/rev/70d5a63ca112?style=coal">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=coal">a178212c3433</a></td>
790 <td>content-divergent: <a href="/rev/70d5a63ca112?style=coal">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=coal">a178212c3433</a></td>
789 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=gitweb' | grep divergent:
791 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=gitweb' | grep divergent:
790 <td>content-divergent: <a class="list" href="/rev/70d5a63ca112?style=gitweb">70d5a63ca112</a> (draft) predecessor <a class="list" href="/rev/a178212c3433?style=gitweb">a178212c3433</a></td>
792 <td>content-divergent: <a class="list" href="/rev/70d5a63ca112?style=gitweb">70d5a63ca112</a> (draft) predecessor <a class="list" href="/rev/a178212c3433?style=gitweb">a178212c3433</a></td>
791 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=monoblue' | grep divergent:
793 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=monoblue' | grep divergent:
792 <dd>content-divergent: <a href="/rev/70d5a63ca112?style=monoblue">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=monoblue">a178212c3433</a></dd>
794 <dd>content-divergent: <a href="/rev/70d5a63ca112?style=monoblue">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=monoblue">a178212c3433</a></dd>
793 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=spartan' | grep divergent:
795 $ get-with-headers.py localhost:$HGPORT 'rev/1a2a9b5b0030?style=spartan' | grep divergent:
794 <td class="unstable">content-divergent: <a href="/rev/70d5a63ca112?style=spartan">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=spartan">a178212c3433</a></td>
796 <td class="unstable">content-divergent: <a href="/rev/70d5a63ca112?style=spartan">70d5a63ca112</a> (draft) predecessor <a href="/rev/a178212c3433?style=spartan">a178212c3433</a></td>
795
797
796 $ killdaemons.py
798 $ killdaemons.py
797
799
798 #endif
800 #endif
@@ -1,1792 +1,1825 b''
1 $ cat >> $HGRCPATH << EOF
1 $ cat >> $HGRCPATH << EOF
2 > [phases]
2 > [phases]
3 > # public changeset are not obsolete
3 > # public changeset are not obsolete
4 > publish=false
4 > publish=false
5 > [ui]
5 > [ui]
6 > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(instabilities, ' {instabilities}')}) [{tags} {bookmarks}] {desc|firstline}{if(obsfate, " [{join(obsfate, "; ")}]")}\n"
6 > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(instabilities, ' {instabilities}')}) [{tags} {bookmarks}] {desc|firstline}{if(obsfate, " [{join(obsfate, "; ")}]")}\n"
7 > EOF
7 > EOF
8 $ mkcommit() {
8 $ mkcommit() {
9 > echo "$1" > "$1"
9 > echo "$1" > "$1"
10 > hg add "$1"
10 > hg add "$1"
11 > hg ci -m "add $1"
11 > hg ci -m "add $1"
12 > }
12 > }
13 $ getid() {
13 $ getid() {
14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
15 > }
15 > }
16
16
17 $ cat > debugkeys.py <<EOF
17 $ cat > debugkeys.py <<EOF
18 > def reposetup(ui, repo):
18 > def reposetup(ui, repo):
19 > class debugkeysrepo(repo.__class__):
19 > class debugkeysrepo(repo.__class__):
20 > def listkeys(self, namespace):
20 > def listkeys(self, namespace):
21 > ui.write(b'listkeys %s\n' % (namespace,))
21 > ui.write(b'listkeys %s\n' % (namespace,))
22 > return super(debugkeysrepo, self).listkeys(namespace)
22 > return super(debugkeysrepo, self).listkeys(namespace)
23 >
23 >
24 > if repo.local():
24 > if repo.local():
25 > repo.__class__ = debugkeysrepo
25 > repo.__class__ = debugkeysrepo
26 > EOF
26 > EOF
27
27
28 $ hg init tmpa
28 $ hg init tmpa
29 $ cd tmpa
29 $ cd tmpa
30 $ mkcommit kill_me
30 $ mkcommit kill_me
31
31
32 Checking that the feature is properly disabled
32 Checking that the feature is properly disabled
33
33
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
35 abort: creating obsolete markers is not enabled on this repo
35 abort: creating obsolete markers is not enabled on this repo
36 [255]
36 [255]
37
37
38 Enabling it
38 Enabling it
39
39
40 $ cat >> $HGRCPATH << EOF
40 $ cat >> $HGRCPATH << EOF
41 > [experimental]
41 > [experimental]
42 > evolution=exchange
42 > evolution=exchange
43 > evolution.createmarkers=True
43 > evolution.createmarkers=True
44 > EOF
44 > EOF
45
45
46 Killing a single changeset without replacement
46 Killing a single changeset without replacement
47
47
48 $ hg debugobsolete 0
48 $ hg debugobsolete 0
49 abort: changeset references must be full hexadecimal node identifiers
49 abort: changeset references must be full hexadecimal node identifiers
50 [255]
50 [255]
51 $ hg debugobsolete '00'
51 $ hg debugobsolete '00'
52 abort: changeset references must be full hexadecimal node identifiers
52 abort: changeset references must be full hexadecimal node identifiers
53 [255]
53 [255]
54 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
54 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
55 1 new obsolescence markers
55 1 new obsolescence markers
56 obsoleted 1 changesets
56 obsoleted 1 changesets
57 $ hg debugobsolete
57 $ hg debugobsolete
58 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
58 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
59
59
60 (test that mercurial is not confused)
60 (test that mercurial is not confused)
61
61
62 $ hg up null --quiet # having 0 as parent prevents it to be hidden
62 $ hg up null --quiet # having 0 as parent prevents it to be hidden
63 $ hg tip
63 $ hg tip
64 -1:000000000000 (public) [tip ]
64 -1:000000000000 (public) [tip ]
65 $ hg up --hidden tip --quiet
65 $ hg up --hidden tip --quiet
66 updated to hidden changeset 97b7c2d76b18
66 updated to hidden changeset 97b7c2d76b18
67 (hidden revision '97b7c2d76b18' is pruned)
67 (hidden revision '97b7c2d76b18' is pruned)
68
68
69 Killing a single changeset with itself should fail
69 Killing a single changeset with itself should fail
70 (simple local safeguard)
70 (simple local safeguard)
71
71
72 $ hg debugobsolete `getid kill_me` `getid kill_me`
72 $ hg debugobsolete `getid kill_me` `getid kill_me`
73 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
73 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
74 [255]
74 [255]
75
75
76 $ cd ..
76 $ cd ..
77
77
78 Killing a single changeset with replacement
78 Killing a single changeset with replacement
79 (and testing the format option)
79 (and testing the format option)
80
80
81 $ hg init tmpb
81 $ hg init tmpb
82 $ cd tmpb
82 $ cd tmpb
83 $ mkcommit a
83 $ mkcommit a
84 $ mkcommit b
84 $ mkcommit b
85 $ mkcommit original_c
85 $ mkcommit original_c
86 $ hg up "desc('b')"
86 $ hg up "desc('b')"
87 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
87 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
88 $ mkcommit new_c
88 $ mkcommit new_c
89 created new head
89 created new head
90 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
90 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
91 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
91 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
92 1 new obsolescence markers
92 1 new obsolescence markers
93 obsoleted 1 changesets
93 obsoleted 1 changesets
94 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
94 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
95 2:245bde4270cd add original_c
95 2:245bde4270cd add original_c
96 $ hg debugrevlog -cd
96 $ hg debugrevlog -cd
97 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
97 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
98 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
98 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
99 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
99 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
100 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
100 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
101 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
101 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
102 $ hg debugobsolete
102 $ hg debugobsolete
103 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
103 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
104
104
105 (check for version number of the obsstore)
105 (check for version number of the obsstore)
106
106
107 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
107 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
108 \x00 (no-eol) (esc)
108 \x00 (no-eol) (esc)
109
109
110 do it again (it read the obsstore before adding new changeset)
110 do it again (it read the obsstore before adding new changeset)
111
111
112 $ hg up '.^'
112 $ hg up '.^'
113 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
113 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
114 $ mkcommit new_2_c
114 $ mkcommit new_2_c
115 created new head
115 created new head
116 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
116 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
117 1 new obsolescence markers
117 1 new obsolescence markers
118 obsoleted 1 changesets
118 obsoleted 1 changesets
119 $ hg debugobsolete
119 $ hg debugobsolete
120 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
120 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
121 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
121 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
122
122
123 Register two markers with a missing node
123 Register two markers with a missing node
124
124
125 $ hg up '.^'
125 $ hg up '.^'
126 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
126 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
127 $ mkcommit new_3_c
127 $ mkcommit new_3_c
128 created new head
128 created new head
129 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
129 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
130 1 new obsolescence markers
130 1 new obsolescence markers
131 obsoleted 1 changesets
131 obsoleted 1 changesets
132 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
132 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
133 1 new obsolescence markers
133 1 new obsolescence markers
134 $ hg debugobsolete
134 $ hg debugobsolete
135 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
135 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
136 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
136 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
137 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
137 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
138 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
138 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
139
139
140 Test the --index option of debugobsolete command
140 Test the --index option of debugobsolete command
141 $ hg debugobsolete --index
141 $ hg debugobsolete --index
142 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
142 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
143 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
143 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
144 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
144 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
145 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
145 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
146
146
147 Refuse pathological nullid successors
147 Refuse pathological nullid successors
148 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
148 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
149 transaction abort!
149 transaction abort!
150 rollback completed
150 rollback completed
151 abort: bad obsolescence marker detected: invalid successors nullid
151 abort: bad obsolescence marker detected: invalid successors nullid
152 [255]
152 [255]
153
153
154 Check that graphlog detect that a changeset is obsolete:
154 Check that graphlog detect that a changeset is obsolete:
155
155
156 $ hg log -G
156 $ hg log -G
157 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
157 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
158 |
158 |
159 o 1:7c3bad9141dc (draft) [ ] add b
159 o 1:7c3bad9141dc (draft) [ ] add b
160 |
160 |
161 o 0:1f0dee641bb7 (draft) [ ] add a
161 o 0:1f0dee641bb7 (draft) [ ] add a
162
162
163
163
164 check that heads does not report them
164 check that heads does not report them
165
165
166 $ hg heads
166 $ hg heads
167 5:5601fb93a350 (draft) [tip ] add new_3_c
167 5:5601fb93a350 (draft) [tip ] add new_3_c
168 $ hg heads --hidden
168 $ hg heads --hidden
169 5:5601fb93a350 (draft) [tip ] add new_3_c
169 5:5601fb93a350 (draft) [tip ] add new_3_c
170 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
170 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
171 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
171 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
172 2:245bde4270cd (draft *obsolete*) [ ] add original_c [rewritten as 3:cdbce2fbb163]
172 2:245bde4270cd (draft *obsolete*) [ ] add original_c [rewritten as 3:cdbce2fbb163]
173
173
174
174
175 check that summary does not report them
175 check that summary does not report them
176
176
177 $ hg init ../sink
177 $ hg init ../sink
178 $ echo '[paths]' >> .hg/hgrc
178 $ echo '[paths]' >> .hg/hgrc
179 $ echo 'default=../sink' >> .hg/hgrc
179 $ echo 'default=../sink' >> .hg/hgrc
180 $ hg summary --remote
180 $ hg summary --remote
181 parent: 5:5601fb93a350 tip
181 parent: 5:5601fb93a350 tip
182 add new_3_c
182 add new_3_c
183 branch: default
183 branch: default
184 commit: (clean)
184 commit: (clean)
185 update: (current)
185 update: (current)
186 phases: 3 draft
186 phases: 3 draft
187 remote: 3 outgoing
187 remote: 3 outgoing
188
188
189 $ hg summary --remote --hidden
189 $ hg summary --remote --hidden
190 parent: 5:5601fb93a350 tip
190 parent: 5:5601fb93a350 tip
191 add new_3_c
191 add new_3_c
192 branch: default
192 branch: default
193 commit: (clean)
193 commit: (clean)
194 update: 3 new changesets, 4 branch heads (merge)
194 update: 3 new changesets, 4 branch heads (merge)
195 phases: 6 draft
195 phases: 6 draft
196 remote: 3 outgoing
196 remote: 3 outgoing
197
197
198 check that various commands work well with filtering
198 check that various commands work well with filtering
199
199
200 $ hg tip
200 $ hg tip
201 5:5601fb93a350 (draft) [tip ] add new_3_c
201 5:5601fb93a350 (draft) [tip ] add new_3_c
202 $ hg log -r 6
202 $ hg log -r 6
203 abort: unknown revision '6'!
203 abort: unknown revision '6'!
204 [255]
204 [255]
205 $ hg log -r 4
205 $ hg log -r 4
206 abort: hidden revision '4' was rewritten as: 5601fb93a350!
206 abort: hidden revision '4' was rewritten as: 5601fb93a350!
207 (use --hidden to access hidden revisions)
207 (use --hidden to access hidden revisions)
208 [255]
208 [255]
209 $ hg debugrevspec 'rev(6)'
209 $ hg debugrevspec 'rev(6)'
210 $ hg debugrevspec 'rev(4)'
210 $ hg debugrevspec 'rev(4)'
211 $ hg debugrevspec 'null'
211 $ hg debugrevspec 'null'
212 -1
212 -1
213
213
214 Check that public changeset are not accounted as obsolete:
214 Check that public changeset are not accounted as obsolete:
215
215
216 $ hg --hidden phase --public 2
216 $ hg --hidden phase --public 2
217 1 new phase-divergent changesets
217 1 new phase-divergent changesets
218 $ hg log -G
218 $ hg log -G
219 @ 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
219 @ 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
220 |
220 |
221 | o 2:245bde4270cd (public) [ ] add original_c
221 | o 2:245bde4270cd (public) [ ] add original_c
222 |/
222 |/
223 o 1:7c3bad9141dc (public) [ ] add b
223 o 1:7c3bad9141dc (public) [ ] add b
224 |
224 |
225 o 0:1f0dee641bb7 (public) [ ] add a
225 o 0:1f0dee641bb7 (public) [ ] add a
226
226
227 $ hg log -r 'unstable()'
227 $ hg log -r 'unstable()'
228 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
228 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
229
229
230
230
231 And that bumped changeset are detected
231 And that bumped changeset are detected
232 --------------------------------------
232 --------------------------------------
233
233
234 If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also
234 If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also
235 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
235 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
236 the public changeset
236 the public changeset
237
237
238 $ hg log --hidden -r 'phasedivergent()'
238 $ hg log --hidden -r 'phasedivergent()'
239 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
239 5:5601fb93a350 (draft phase-divergent) [tip ] add new_3_c
240
240
241 And that we can't push bumped changeset
241 And that we can't push bumped changeset
242
242
243 $ hg push ../tmpa -r 0 --force #(make repo related)
243 $ hg push ../tmpa -r 0 --force #(make repo related)
244 pushing to ../tmpa
244 pushing to ../tmpa
245 searching for changes
245 searching for changes
246 warning: repository is unrelated
246 warning: repository is unrelated
247 adding changesets
247 adding changesets
248 adding manifests
248 adding manifests
249 adding file changes
249 adding file changes
250 added 1 changesets with 1 changes to 1 files (+1 heads)
250 added 1 changesets with 1 changes to 1 files (+1 heads)
251 $ hg push ../tmpa
251 $ hg push ../tmpa
252 pushing to ../tmpa
252 pushing to ../tmpa
253 searching for changes
253 searching for changes
254 abort: push includes phase-divergent changeset: 5601fb93a350!
254 abort: push includes unstable changesets:
255 5601fb93a350 (phase-divergent)
255 [255]
256 [255]
256
257
257 Fixing "bumped" situation
258 Fixing "bumped" situation
258 We need to create a clone of 5 and add a special marker with a flag
259 We need to create a clone of 5 and add a special marker with a flag
259
260
260 $ hg summary
261 $ hg summary
261 parent: 5:5601fb93a350 tip (phase-divergent)
262 parent: 5:5601fb93a350 tip (phase-divergent)
262 add new_3_c
263 add new_3_c
263 branch: default
264 branch: default
264 commit: (clean)
265 commit: (clean)
265 update: 1 new changesets, 2 branch heads (merge)
266 update: 1 new changesets, 2 branch heads (merge)
266 phases: 1 draft
267 phases: 1 draft
267 phase-divergent: 1 changesets
268 phase-divergent: 1 changesets
268 $ hg up '5^'
269 $ hg up '5^'
269 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
270 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
270 $ hg revert -ar 5
271 $ hg revert -ar 5
271 adding new_3_c
272 adding new_3_c
272 $ hg ci -m 'add n3w_3_c'
273 $ hg ci -m 'add n3w_3_c'
273 created new head
274 created new head
274 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
275 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
275 1 new obsolescence markers
276 1 new obsolescence markers
276 obsoleted 1 changesets
277 obsoleted 1 changesets
277 $ hg log -r 'phasedivergent()'
278 $ hg log -r 'phasedivergent()'
278 $ hg log -G
279 $ hg log -G
279 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
280 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
280 |
281 |
281 | o 2:245bde4270cd (public) [ ] add original_c
282 | o 2:245bde4270cd (public) [ ] add original_c
282 |/
283 |/
283 o 1:7c3bad9141dc (public) [ ] add b
284 o 1:7c3bad9141dc (public) [ ] add b
284 |
285 |
285 o 0:1f0dee641bb7 (public) [ ] add a
286 o 0:1f0dee641bb7 (public) [ ] add a
286
287
287
288
288 Basic exclusive testing
289 Basic exclusive testing
289
290
290 $ hg log -G --hidden
291 $ hg log -G --hidden
291 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
292 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
292 |
293 |
293 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c [rewritten as 6:6f9641995072]
294 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c [rewritten as 6:6f9641995072]
294 |/
295 |/
295 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
296 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
296 |/
297 |/
297 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
298 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
298 |/
299 |/
299 | o 2:245bde4270cd (public) [ ] add original_c
300 | o 2:245bde4270cd (public) [ ] add original_c
300 |/
301 |/
301 o 1:7c3bad9141dc (public) [ ] add b
302 o 1:7c3bad9141dc (public) [ ] add b
302 |
303 |
303 o 0:1f0dee641bb7 (public) [ ] add a
304 o 0:1f0dee641bb7 (public) [ ] add a
304
305
305 $ hg debugobsolete --rev 6f9641995072
306 $ hg debugobsolete --rev 6f9641995072
306 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
307 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
307 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
308 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
308 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
309 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
309 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
310 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
310 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
311 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
311 $ hg debugobsolete --rev 6f9641995072 --exclusive
312 $ hg debugobsolete --rev 6f9641995072 --exclusive
312 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
313 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
313 $ hg debugobsolete --rev 5601fb93a350 --hidden
314 $ hg debugobsolete --rev 5601fb93a350 --hidden
314 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
315 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
315 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
316 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
316 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
317 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
317 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
318 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
318 $ hg debugobsolete --rev 5601fb93a350 --hidden --exclusive
319 $ hg debugobsolete --rev 5601fb93a350 --hidden --exclusive
319 $ hg debugobsolete --rev 5601fb93a350+6f9641995072 --hidden --exclusive
320 $ hg debugobsolete --rev 5601fb93a350+6f9641995072 --hidden --exclusive
320 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
321 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
321 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
322 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
322 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
323 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
323
324
324 $ cd ..
325 $ cd ..
325
326
326 Revision 0 is hidden
327 Revision 0 is hidden
327 --------------------
328 --------------------
328
329
329 $ hg init rev0hidden
330 $ hg init rev0hidden
330 $ cd rev0hidden
331 $ cd rev0hidden
331
332
332 $ mkcommit kill0
333 $ mkcommit kill0
333 $ hg up -q null
334 $ hg up -q null
334 $ hg debugobsolete `getid kill0`
335 $ hg debugobsolete `getid kill0`
335 1 new obsolescence markers
336 1 new obsolescence markers
336 obsoleted 1 changesets
337 obsoleted 1 changesets
337 $ mkcommit a
338 $ mkcommit a
338 $ mkcommit b
339 $ mkcommit b
339
340
340 Should pick the first visible revision as "repo" node
341 Should pick the first visible revision as "repo" node
341
342
342 $ hg archive ../archive-null
343 $ hg archive ../archive-null
343 $ cat ../archive-null/.hg_archival.txt
344 $ cat ../archive-null/.hg_archival.txt
344 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
345 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
345 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
346 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
346 branch: default
347 branch: default
347 latesttag: null
348 latesttag: null
348 latesttagdistance: 2
349 latesttagdistance: 2
349 changessincelatesttag: 2
350 changessincelatesttag: 2
350
351
351
352
352 $ cd ..
353 $ cd ..
353
354
354 Can disable transaction summary report
355 Can disable transaction summary report
355
356
356 $ hg init transaction-summary
357 $ hg init transaction-summary
357 $ cd transaction-summary
358 $ cd transaction-summary
358 $ mkcommit a
359 $ mkcommit a
359 $ mkcommit b
360 $ mkcommit b
360 $ hg up -q null
361 $ hg up -q null
361 $ hg --config experimental.evolution.report-instabilities=false debugobsolete `getid a`
362 $ hg --config experimental.evolution.report-instabilities=false debugobsolete `getid a`
362 1 new obsolescence markers
363 1 new obsolescence markers
363 obsoleted 1 changesets
364 obsoleted 1 changesets
364 $ cd ..
365 $ cd ..
365
366
366 Exchange Test
367 Exchange Test
367 ============================
368 ============================
368
369
369 Destination repo does not have any data
370 Destination repo does not have any data
370 ---------------------------------------
371 ---------------------------------------
371
372
372 Simple incoming test
373 Simple incoming test
373
374
374 $ hg init tmpc
375 $ hg init tmpc
375 $ cd tmpc
376 $ cd tmpc
376 $ hg incoming ../tmpb
377 $ hg incoming ../tmpb
377 comparing with ../tmpb
378 comparing with ../tmpb
378 0:1f0dee641bb7 (public) [ ] add a
379 0:1f0dee641bb7 (public) [ ] add a
379 1:7c3bad9141dc (public) [ ] add b
380 1:7c3bad9141dc (public) [ ] add b
380 2:245bde4270cd (public) [ ] add original_c
381 2:245bde4270cd (public) [ ] add original_c
381 6:6f9641995072 (draft) [tip ] add n3w_3_c
382 6:6f9641995072 (draft) [tip ] add n3w_3_c
382
383
383 Try to pull markers while testing pull --confirm
384 Try to pull markers while testing pull --confirm
384 (extinct changeset are excluded but marker are pushed)
385 (extinct changeset are excluded but marker are pushed)
385
386
386 $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
387 $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
387 > n
388 > n
388 > EOF
389 > EOF
389 pulling from ../tmpb
390 pulling from ../tmpb
390 requesting all changes
391 requesting all changes
391 adding changesets
392 adding changesets
392 adding manifests
393 adding manifests
393 adding file changes
394 adding file changes
394 adding 4 changesets with 4 changes to 4 files (+1 heads)
395 adding 4 changesets with 4 changes to 4 files (+1 heads)
395 5 new obsolescence markers
396 5 new obsolescence markers
396 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
397 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
397 accept incoming changes (yn)? n
398 accept incoming changes (yn)? n
398 transaction abort!
399 transaction abort!
399 rollback completed
400 rollback completed
400 abort: user aborted
401 abort: user aborted
401 [255]
402 [255]
402 $ HGPLAIN=1 hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
403 $ HGPLAIN=1 hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
403 > n
404 > n
404 > EOF
405 > EOF
405 pulling from ../tmpb
406 pulling from ../tmpb
406 requesting all changes
407 requesting all changes
407 adding changesets
408 adding changesets
408 adding manifests
409 adding manifests
409 adding file changes
410 adding file changes
410 adding 4 changesets with 4 changes to 4 files (+1 heads)
411 adding 4 changesets with 4 changes to 4 files (+1 heads)
411 5 new obsolescence markers
412 5 new obsolescence markers
412 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
413 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
413 accept incoming changes (yn)? n
414 accept incoming changes (yn)? n
414 transaction abort!
415 transaction abort!
415 rollback completed
416 rollback completed
416 abort: user aborted
417 abort: user aborted
417 [255]
418 [255]
418 $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
419 $ hg pull ../tmpb --confirm --config ui.interactive=true <<EOF
419 > y
420 > y
420 > EOF
421 > EOF
421 pulling from ../tmpb
422 pulling from ../tmpb
422 requesting all changes
423 requesting all changes
423 adding changesets
424 adding changesets
424 adding manifests
425 adding manifests
425 adding file changes
426 adding file changes
426 adding 4 changesets with 4 changes to 4 files (+1 heads)
427 adding 4 changesets with 4 changes to 4 files (+1 heads)
427 5 new obsolescence markers
428 5 new obsolescence markers
428 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
429 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
429 accept incoming changes (yn)? y
430 accept incoming changes (yn)? y
430 added 4 changesets with 4 changes to 4 files (+1 heads)
431 added 4 changesets with 4 changes to 4 files (+1 heads)
431 5 new obsolescence markers
432 5 new obsolescence markers
432 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
433 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
433 (run 'hg heads' to see heads, 'hg merge' to merge)
434 (run 'hg heads' to see heads, 'hg merge' to merge)
434 $ hg debugobsolete
435 $ hg debugobsolete
435 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
436 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
436 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
437 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
437 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
438 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
438 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
439 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
439 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
440 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
440
441
441 Rollback//Transaction support
442 Rollback//Transaction support
442
443
443 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
444 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
444 1 new obsolescence markers
445 1 new obsolescence markers
445 $ hg debugobsolete
446 $ hg debugobsolete
446 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
447 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
447 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
448 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
448 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
449 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
449 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
450 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
450 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
451 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
451 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
452 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
452 $ hg rollback -n
453 $ hg rollback -n
453 repository tip rolled back to revision 3 (undo debugobsolete)
454 repository tip rolled back to revision 3 (undo debugobsolete)
454 $ hg rollback
455 $ hg rollback
455 repository tip rolled back to revision 3 (undo debugobsolete)
456 repository tip rolled back to revision 3 (undo debugobsolete)
456 $ hg debugobsolete
457 $ hg debugobsolete
457 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
458 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
458 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
459 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
459 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
460 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
460 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
461 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
461 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
462 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
462
463
463 $ cd ..
464 $ cd ..
464
465
465 Try to push markers
466 Try to push markers
466
467
467 $ hg init tmpd
468 $ hg init tmpd
468 $ hg -R tmpb push tmpd
469 $ hg -R tmpb push tmpd
469 pushing to tmpd
470 pushing to tmpd
470 searching for changes
471 searching for changes
471 adding changesets
472 adding changesets
472 adding manifests
473 adding manifests
473 adding file changes
474 adding file changes
474 added 4 changesets with 4 changes to 4 files (+1 heads)
475 added 4 changesets with 4 changes to 4 files (+1 heads)
475 5 new obsolescence markers
476 5 new obsolescence markers
476 $ hg -R tmpd debugobsolete | sort
477 $ hg -R tmpd debugobsolete | sort
477 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
478 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
478 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
479 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
479 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
480 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
480 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
481 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
481 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
482 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
482
483
483 Check obsolete keys are exchanged only if source has an obsolete store
484 Check obsolete keys are exchanged only if source has an obsolete store
484
485
485 $ hg init empty
486 $ hg init empty
486 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
487 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
487 pushing to tmpd
488 pushing to tmpd
488 listkeys phases
489 listkeys phases
489 listkeys bookmarks
490 listkeys bookmarks
490 no changes found
491 no changes found
491 listkeys phases
492 listkeys phases
492 [1]
493 [1]
493
494
494 clone support
495 clone support
495 (markers are copied and extinct changesets are included to allow hardlinks)
496 (markers are copied and extinct changesets are included to allow hardlinks)
496
497
497 $ hg clone tmpb clone-dest
498 $ hg clone tmpb clone-dest
498 updating to branch default
499 updating to branch default
499 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
500 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
500 $ hg -R clone-dest log -G --hidden
501 $ hg -R clone-dest log -G --hidden
501 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
502 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
502 |
503 |
503 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c [rewritten as 6:6f9641995072]
504 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c [rewritten as 6:6f9641995072]
504 |/
505 |/
505 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
506 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c [rewritten as 5:5601fb93a350]
506 |/
507 |/
507 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
508 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c [rewritten as 4:ca819180edb9]
508 |/
509 |/
509 | o 2:245bde4270cd (public) [ ] add original_c
510 | o 2:245bde4270cd (public) [ ] add original_c
510 |/
511 |/
511 o 1:7c3bad9141dc (public) [ ] add b
512 o 1:7c3bad9141dc (public) [ ] add b
512 |
513 |
513 o 0:1f0dee641bb7 (public) [ ] add a
514 o 0:1f0dee641bb7 (public) [ ] add a
514
515
515 $ hg -R clone-dest debugobsolete
516 $ hg -R clone-dest debugobsolete
516 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
517 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
517 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
518 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
518 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
519 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
519 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
520 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
520 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
521 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
521
522
522
523
523 Destination repo have existing data
524 Destination repo have existing data
524 ---------------------------------------
525 ---------------------------------------
525
526
526 On pull
527 On pull
527
528
528 $ hg init tmpe
529 $ hg init tmpe
529 $ cd tmpe
530 $ cd tmpe
530 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
531 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
531 1 new obsolescence markers
532 1 new obsolescence markers
532 $ hg pull ../tmpb
533 $ hg pull ../tmpb
533 pulling from ../tmpb
534 pulling from ../tmpb
534 requesting all changes
535 requesting all changes
535 adding changesets
536 adding changesets
536 adding manifests
537 adding manifests
537 adding file changes
538 adding file changes
538 added 4 changesets with 4 changes to 4 files (+1 heads)
539 added 4 changesets with 4 changes to 4 files (+1 heads)
539 5 new obsolescence markers
540 5 new obsolescence markers
540 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
541 new changesets 1f0dee641bb7:6f9641995072 (1 drafts)
541 (run 'hg heads' to see heads, 'hg merge' to merge)
542 (run 'hg heads' to see heads, 'hg merge' to merge)
542 $ hg debugobsolete
543 $ hg debugobsolete
543 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
544 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
544 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
545 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
545 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
546 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
546 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
547 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
547 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
548 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
548 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
549 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
549
550
550
551
551 On push
552 On push
552
553
553 $ hg push ../tmpc
554 $ hg push ../tmpc
554 pushing to ../tmpc
555 pushing to ../tmpc
555 searching for changes
556 searching for changes
556 no changes found
557 no changes found
557 1 new obsolescence markers
558 1 new obsolescence markers
558 [1]
559 [1]
559 $ hg -R ../tmpc debugobsolete
560 $ hg -R ../tmpc debugobsolete
560 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
561 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
561 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
562 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
562 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
563 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
563 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
564 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
564 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
565 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
565 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
566 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
566
567
567 detect outgoing obsolete and unstable
568 detect outgoing obsolete and unstable
568 ---------------------------------------
569 ---------------------------------------
569
570
570
571
571 $ hg log -G
572 $ hg log -G
572 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
573 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
573 |
574 |
574 | o 2:245bde4270cd (public) [ ] add original_c
575 | o 2:245bde4270cd (public) [ ] add original_c
575 |/
576 |/
576 o 1:7c3bad9141dc (public) [ ] add b
577 o 1:7c3bad9141dc (public) [ ] add b
577 |
578 |
578 o 0:1f0dee641bb7 (public) [ ] add a
579 o 0:1f0dee641bb7 (public) [ ] add a
579
580
580 $ hg up 'desc("n3w_3_c")'
581 $ hg up 'desc("n3w_3_c")'
581 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
582 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
582 $ mkcommit original_d
583 $ mkcommit original_d
583 $ mkcommit original_e
584 $ mkcommit original_e
584 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
585 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
585 1 new obsolescence markers
586 1 new obsolescence markers
586 obsoleted 1 changesets
587 obsoleted 1 changesets
587 1 new orphan changesets
588 1 new orphan changesets
588 $ hg log -r 'unstable()'
589 $ hg log -r 'unstable()'
589 5:cda648ca50f5 (draft orphan) [tip ] add original_e
590 5:cda648ca50f5 (draft orphan) [tip ] add original_e
590 $ hg debugobsolete | grep `getid original_d`
591 $ hg debugobsolete | grep `getid original_d`
591 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
592 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
592 $ hg log -r 'obsolete()'
593 $ hg log -r 'obsolete()'
593 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
594 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
594 $ hg summary
595 $ hg summary
595 parent: 5:cda648ca50f5 tip (orphan)
596 parent: 5:cda648ca50f5 tip (orphan)
596 add original_e
597 add original_e
597 branch: default
598 branch: default
598 commit: (clean)
599 commit: (clean)
599 update: 1 new changesets, 2 branch heads (merge)
600 update: 1 new changesets, 2 branch heads (merge)
600 phases: 3 draft
601 phases: 3 draft
601 orphan: 1 changesets
602 orphan: 1 changesets
602 $ hg log -G -r '::orphan()'
603 $ hg log -G -r '::orphan()'
603 @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
604 @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
604 |
605 |
605 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
606 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
606 |
607 |
607 o 3:6f9641995072 (draft) [ ] add n3w_3_c
608 o 3:6f9641995072 (draft) [ ] add n3w_3_c
608 |
609 |
609 o 1:7c3bad9141dc (public) [ ] add b
610 o 1:7c3bad9141dc (public) [ ] add b
610 |
611 |
611 o 0:1f0dee641bb7 (public) [ ] add a
612 o 0:1f0dee641bb7 (public) [ ] add a
612
613
613
614
614 refuse to push obsolete changeset
615 refuse to push obsolete changeset
615
616
616 $ hg push ../tmpc/ -r 'desc("original_d")'
617 $ hg push ../tmpc/ -r 'desc("original_d")'
617 pushing to ../tmpc/
618 pushing to ../tmpc/
618 searching for changes
619 searching for changes
619 abort: push includes obsolete changeset: 94b33453f93b!
620 abort: push includes obsolete changesets:
621 94b33453f93b
620 [255]
622 [255]
621
623
622 refuse to push unstable changeset
624 refuse to push unstable changeset
623
625
624 $ hg push ../tmpc/
626 $ hg push ../tmpc/
625 pushing to ../tmpc/
627 pushing to ../tmpc/
626 searching for changes
628 searching for changes
627 abort: push includes orphan changeset: cda648ca50f5!
629 abort: push includes obsolete changesets:
630 94b33453f93b
631 push includes unstable changesets:
632 cda648ca50f5 (orphan)
628 [255]
633 [255]
629
634
630 with --force it will work anyway
635 with --force it will work anyway
631
636
632 $ hg push ../tmpc/ --force
637 $ hg push ../tmpc/ --force
633 pushing to ../tmpc/
638 pushing to ../tmpc/
634 searching for changes
639 searching for changes
635 adding changesets
640 adding changesets
636 adding manifests
641 adding manifests
637 adding file changes
642 adding file changes
638 added 2 changesets with 2 changes to 2 files
643 added 2 changesets with 2 changes to 2 files
639 1 new obsolescence markers
644 1 new obsolescence markers
640 1 new orphan changesets
645 1 new orphan changesets
641
646
642 if the orphan changeset is already on the server, pushing should work
647 if the orphan changeset is already on the server, pushing should work
643
648
644 $ hg push ../tmpc/
649 $ hg push ../tmpc/
645 pushing to ../tmpc/
650 pushing to ../tmpc/
646 searching for changes
651 searching for changes
647 no changes found
652 no changes found
648 [1]
653 [1]
649
654
655 pushing should work even if the outgoing changes contain an unrelated changeset
656 (neither obsolete nor unstable) (issue6372)
657
658 $ hg up 1 -q
659 $ hg branch new -q
660 $ mkcommit c
661
662 $ hg push ../tmpc/ --new-branch
663 pushing to ../tmpc/
664 searching for changes
665 adding changesets
666 adding manifests
667 adding file changes
668 added 1 changesets with 1 changes to 1 files (+1 heads)
669
670 make later tests work unmodified
671
672 $ hg --config extensions.strip= strip tip -q
673 $ hg up 5 -q
674
650 Test that extinct changeset are properly detected
675 Test that extinct changeset are properly detected
651
676
652 $ hg log -r 'extinct()'
677 $ hg log -r 'extinct()'
653
678
654 Don't try to push extinct changeset
679 Don't try to push extinct changeset
655
680
656 $ hg init ../tmpf
681 $ hg init ../tmpf
657 $ hg out ../tmpf
682 $ hg out ../tmpf
658 comparing with ../tmpf
683 comparing with ../tmpf
659 searching for changes
684 searching for changes
660 0:1f0dee641bb7 (public) [ ] add a
685 0:1f0dee641bb7 (public) [ ] add a
661 1:7c3bad9141dc (public) [ ] add b
686 1:7c3bad9141dc (public) [ ] add b
662 2:245bde4270cd (public) [ ] add original_c
687 2:245bde4270cd (public) [ ] add original_c
663 3:6f9641995072 (draft) [ ] add n3w_3_c
688 3:6f9641995072 (draft) [ ] add n3w_3_c
664 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
689 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
665 5:cda648ca50f5 (draft orphan) [tip ] add original_e
690 5:cda648ca50f5 (draft orphan) [tip ] add original_e
666 $ hg push ../tmpf -f # -f because be push unstable too
691 $ hg push ../tmpf -f # -f because be push unstable too
667 pushing to ../tmpf
692 pushing to ../tmpf
668 searching for changes
693 searching for changes
669 adding changesets
694 adding changesets
670 adding manifests
695 adding manifests
671 adding file changes
696 adding file changes
672 added 6 changesets with 6 changes to 6 files (+1 heads)
697 added 6 changesets with 6 changes to 6 files (+1 heads)
673 7 new obsolescence markers
698 7 new obsolescence markers
674 1 new orphan changesets
699 1 new orphan changesets
675
700
676 no warning displayed
701 no warning displayed
677
702
678 $ hg push ../tmpf
703 $ hg push ../tmpf
679 pushing to ../tmpf
704 pushing to ../tmpf
680 searching for changes
705 searching for changes
681 no changes found
706 no changes found
682 [1]
707 [1]
683
708
684 Do not warn about new head when the new head is a successors of a remote one
709 Do not warn about new head when the new head is a successors of a remote one
685
710
686 $ hg log -G
711 $ hg log -G
687 @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
712 @ 5:cda648ca50f5 (draft orphan) [tip ] add original_e
688 |
713 |
689 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
714 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
690 |
715 |
691 o 3:6f9641995072 (draft) [ ] add n3w_3_c
716 o 3:6f9641995072 (draft) [ ] add n3w_3_c
692 |
717 |
693 | o 2:245bde4270cd (public) [ ] add original_c
718 | o 2:245bde4270cd (public) [ ] add original_c
694 |/
719 |/
695 o 1:7c3bad9141dc (public) [ ] add b
720 o 1:7c3bad9141dc (public) [ ] add b
696 |
721 |
697 o 0:1f0dee641bb7 (public) [ ] add a
722 o 0:1f0dee641bb7 (public) [ ] add a
698
723
699 $ hg up -q 'desc(n3w_3_c)'
724 $ hg up -q 'desc(n3w_3_c)'
700 $ mkcommit obsolete_e
725 $ mkcommit obsolete_e
701 created new head
726 created new head
702 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'` \
727 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'` \
703 > -u 'test <test@example.net>'
728 > -u 'test <test@example.net>'
704 1 new obsolescence markers
729 1 new obsolescence markers
705 obsoleted 1 changesets
730 obsoleted 1 changesets
706 $ hg outgoing ../tmpf # parasite hg outgoing testin
731 $ hg outgoing ../tmpf # parasite hg outgoing testin
707 comparing with ../tmpf
732 comparing with ../tmpf
708 searching for changes
733 searching for changes
709 6:3de5eca88c00 (draft) [tip ] add obsolete_e
734 6:3de5eca88c00 (draft) [tip ] add obsolete_e
710 $ hg push ../tmpf
735 $ hg push ../tmpf
711 pushing to ../tmpf
736 pushing to ../tmpf
712 searching for changes
737 searching for changes
713 adding changesets
738 adding changesets
714 adding manifests
739 adding manifests
715 adding file changes
740 adding file changes
716 added 1 changesets with 1 changes to 1 files (+1 heads)
741 added 1 changesets with 1 changes to 1 files (+1 heads)
717 1 new obsolescence markers
742 1 new obsolescence markers
718 obsoleted 1 changesets
743 obsoleted 1 changesets
719
744
720 test relevance computation
745 test relevance computation
721 ---------------------------------------
746 ---------------------------------------
722
747
723 Checking simple case of "marker relevance".
748 Checking simple case of "marker relevance".
724
749
725
750
726 Reminder of the repo situation
751 Reminder of the repo situation
727
752
728 $ hg log --hidden --graph
753 $ hg log --hidden --graph
729 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
754 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
730 |
755 |
731 | x 5:cda648ca50f5 (draft *obsolete*) [ ] add original_e [rewritten as 6:3de5eca88c00 by test <test@example.net>]
756 | x 5:cda648ca50f5 (draft *obsolete*) [ ] add original_e [rewritten as 6:3de5eca88c00 by test <test@example.net>]
732 | |
757 | |
733 | x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
758 | x 4:94b33453f93b (draft *obsolete*) [ ] add original_d [pruned]
734 |/
759 |/
735 o 3:6f9641995072 (draft) [ ] add n3w_3_c
760 o 3:6f9641995072 (draft) [ ] add n3w_3_c
736 |
761 |
737 | o 2:245bde4270cd (public) [ ] add original_c
762 | o 2:245bde4270cd (public) [ ] add original_c
738 |/
763 |/
739 o 1:7c3bad9141dc (public) [ ] add b
764 o 1:7c3bad9141dc (public) [ ] add b
740 |
765 |
741 o 0:1f0dee641bb7 (public) [ ] add a
766 o 0:1f0dee641bb7 (public) [ ] add a
742
767
743
768
744 List of all markers
769 List of all markers
745
770
746 $ hg debugobsolete
771 $ hg debugobsolete
747 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
772 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
748 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
773 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
749 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
774 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
750 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
775 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
751 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
776 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
752 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
777 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
753 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
778 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
754 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
779 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
755
780
756 List of changesets with no chain
781 List of changesets with no chain
757
782
758 $ hg debugobsolete --hidden --rev ::2
783 $ hg debugobsolete --hidden --rev ::2
759
784
760 List of changesets that are included on marker chain
785 List of changesets that are included on marker chain
761
786
762 $ hg debugobsolete --hidden --rev 6
787 $ hg debugobsolete --hidden --rev 6
763 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
788 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
764
789
765 List of changesets with a longer chain, (including a pruned children)
790 List of changesets with a longer chain, (including a pruned children)
766
791
767 $ hg debugobsolete --hidden --rev 3
792 $ hg debugobsolete --hidden --rev 3
768 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
793 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
769 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
794 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
770 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
795 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
771 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
796 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
772 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
797 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
773 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
798 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
774 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
799 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
775
800
776 List of both
801 List of both
777
802
778 $ hg debugobsolete --hidden --rev 3::6
803 $ hg debugobsolete --hidden --rev 3::6
779 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
804 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
780 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
805 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
781 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
806 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
782 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
807 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
783 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
808 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
784 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
809 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
785 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
810 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test <test@example.net>'} (glob)
786 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
811 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
787
812
788 List of all markers in JSON
813 List of all markers in JSON
789
814
790 $ hg debugobsolete -Tjson
815 $ hg debugobsolete -Tjson
791 [
816 [
792 {
817 {
793 "date": [1339, 0],
818 "date": [1339, 0],
794 "flag": 0,
819 "flag": 0,
795 "metadata": {"user": "test"},
820 "metadata": {"user": "test"},
796 "prednode": "1339133913391339133913391339133913391339",
821 "prednode": "1339133913391339133913391339133913391339",
797 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
822 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
798 },
823 },
799 {
824 {
800 "date": [1339, 0],
825 "date": [1339, 0],
801 "flag": 0,
826 "flag": 0,
802 "metadata": {"user": "test"},
827 "metadata": {"user": "test"},
803 "prednode": "1337133713371337133713371337133713371337",
828 "prednode": "1337133713371337133713371337133713371337",
804 "succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
829 "succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
805 },
830 },
806 {
831 {
807 "date": [121, 120],
832 "date": [121, 120],
808 "flag": 12,
833 "flag": 12,
809 "metadata": {"user": "test"},
834 "metadata": {"user": "test"},
810 "prednode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
835 "prednode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
811 "succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
836 "succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
812 },
837 },
813 {
838 {
814 "date": [1338, 0],
839 "date": [1338, 0],
815 "flag": 1,
840 "flag": 1,
816 "metadata": {"user": "test"},
841 "metadata": {"user": "test"},
817 "prednode": "5601fb93a350734d935195fee37f4054c529ff39",
842 "prednode": "5601fb93a350734d935195fee37f4054c529ff39",
818 "succnodes": ["6f96419950729f3671185b847352890f074f7557"]
843 "succnodes": ["6f96419950729f3671185b847352890f074f7557"]
819 },
844 },
820 {
845 {
821 "date": [1338, 0],
846 "date": [1338, 0],
822 "flag": 0,
847 "flag": 0,
823 "metadata": {"user": "test"},
848 "metadata": {"user": "test"},
824 "prednode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
849 "prednode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
825 "succnodes": ["1337133713371337133713371337133713371337"]
850 "succnodes": ["1337133713371337133713371337133713371337"]
826 },
851 },
827 {
852 {
828 "date": [1337, 0],
853 "date": [1337, 0],
829 "flag": 0,
854 "flag": 0,
830 "metadata": {"user": "test"},
855 "metadata": {"user": "test"},
831 "prednode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
856 "prednode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
832 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
857 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
833 },
858 },
834 {
859 {
835 "date": [0, 0],
860 "date": [0, 0],
836 "flag": 0,
861 "flag": 0,
837 "metadata": {"user": "test"},
862 "metadata": {"user": "test"},
838 "parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
863 "parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
839 "prednode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
864 "prednode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
840 "succnodes": []
865 "succnodes": []
841 },
866 },
842 {
867 {
843 "date": *, (glob)
868 "date": *, (glob)
844 "flag": 0,
869 "flag": 0,
845 "metadata": {"user": "test <test@example.net>"},
870 "metadata": {"user": "test <test@example.net>"},
846 "prednode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
871 "prednode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
847 "succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
872 "succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
848 }
873 }
849 ]
874 ]
850
875
851 Template keywords
876 Template keywords
852
877
853 $ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
878 $ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
854 3de5eca88c00 ????-??-?? (glob)
879 3de5eca88c00 ????-??-?? (glob)
855 $ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
880 $ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
856 user=test <test@example.net>
881 user=test <test@example.net>
857 $ hg debugobsolete -r6 -T '{metadata}\n{metadata}\n'
882 $ hg debugobsolete -r6 -T '{metadata}\n{metadata}\n'
858 'user': 'test <test@example.net>'
883 'user': 'test <test@example.net>'
859 'user': 'test <test@example.net>'
884 'user': 'test <test@example.net>'
860 $ hg debugobsolete -r6 -T '{succnodes}\n{succnodes}\n'
885 $ hg debugobsolete -r6 -T '{succnodes}\n{succnodes}\n'
861 3de5eca88c00aa039da7399a220f4a5221faa585
886 3de5eca88c00aa039da7399a220f4a5221faa585
862 3de5eca88c00aa039da7399a220f4a5221faa585
887 3de5eca88c00aa039da7399a220f4a5221faa585
863 $ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
888 $ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
864 0 test <test@example.net>
889 0 test <test@example.net>
865
890
866 Test the debug output for exchange
891 Test the debug output for exchange
867 ----------------------------------
892 ----------------------------------
868
893
869 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' # bundle2
894 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' # bundle2
870 pulling from ../tmpb
895 pulling from ../tmpb
871 searching for changes
896 searching for changes
872 no changes found
897 no changes found
873 obsmarker-exchange: 346 bytes received
898 obsmarker-exchange: 346 bytes received
874
899
875 check hgweb does not explode
900 check hgweb does not explode
876 ====================================
901 ====================================
877
902
878 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
903 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
879 adding changesets
904 adding changesets
880 adding manifests
905 adding manifests
881 adding file changes
906 adding file changes
882 added 62 changesets with 63 changes to 9 files (+60 heads)
907 added 62 changesets with 63 changes to 9 files (+60 heads)
883 new changesets 50c51b361e60:c15e9edfca13 (62 drafts)
908 new changesets 50c51b361e60:c15e9edfca13 (62 drafts)
884 (2 other changesets obsolete on arrival)
909 (2 other changesets obsolete on arrival)
885 (run 'hg heads .' to see heads, 'hg merge' to merge)
910 (run 'hg heads .' to see heads, 'hg merge' to merge)
886 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
911 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
887 > do
912 > do
888 > hg debugobsolete $node
913 > hg debugobsolete $node
889 > done
914 > done
890 1 new obsolescence markers
915 1 new obsolescence markers
891 obsoleted 1 changesets
916 obsoleted 1 changesets
892 1 new obsolescence markers
917 1 new obsolescence markers
893 obsoleted 1 changesets
918 obsoleted 1 changesets
894 1 new obsolescence markers
919 1 new obsolescence markers
895 obsoleted 1 changesets
920 obsoleted 1 changesets
896 1 new obsolescence markers
921 1 new obsolescence markers
897 obsoleted 1 changesets
922 obsoleted 1 changesets
898 1 new obsolescence markers
923 1 new obsolescence markers
899 obsoleted 1 changesets
924 obsoleted 1 changesets
900 1 new obsolescence markers
925 1 new obsolescence markers
901 obsoleted 1 changesets
926 obsoleted 1 changesets
902 1 new obsolescence markers
927 1 new obsolescence markers
903 obsoleted 1 changesets
928 obsoleted 1 changesets
904 1 new obsolescence markers
929 1 new obsolescence markers
905 obsoleted 1 changesets
930 obsoleted 1 changesets
906 1 new obsolescence markers
931 1 new obsolescence markers
907 obsoleted 1 changesets
932 obsoleted 1 changesets
908 1 new obsolescence markers
933 1 new obsolescence markers
909 obsoleted 1 changesets
934 obsoleted 1 changesets
910 1 new obsolescence markers
935 1 new obsolescence markers
911 obsoleted 1 changesets
936 obsoleted 1 changesets
912 1 new obsolescence markers
937 1 new obsolescence markers
913 obsoleted 1 changesets
938 obsoleted 1 changesets
914 1 new obsolescence markers
939 1 new obsolescence markers
915 obsoleted 1 changesets
940 obsoleted 1 changesets
916 1 new obsolescence markers
941 1 new obsolescence markers
917 obsoleted 1 changesets
942 obsoleted 1 changesets
918 1 new obsolescence markers
943 1 new obsolescence markers
919 obsoleted 1 changesets
944 obsoleted 1 changesets
920 1 new obsolescence markers
945 1 new obsolescence markers
921 obsoleted 1 changesets
946 obsoleted 1 changesets
922 1 new obsolescence markers
947 1 new obsolescence markers
923 obsoleted 1 changesets
948 obsoleted 1 changesets
924 1 new obsolescence markers
949 1 new obsolescence markers
925 obsoleted 1 changesets
950 obsoleted 1 changesets
926 1 new obsolescence markers
951 1 new obsolescence markers
927 obsoleted 1 changesets
952 obsoleted 1 changesets
928 1 new obsolescence markers
953 1 new obsolescence markers
929 obsoleted 1 changesets
954 obsoleted 1 changesets
930 1 new obsolescence markers
955 1 new obsolescence markers
931 obsoleted 1 changesets
956 obsoleted 1 changesets
932 1 new obsolescence markers
957 1 new obsolescence markers
933 obsoleted 1 changesets
958 obsoleted 1 changesets
934 1 new obsolescence markers
959 1 new obsolescence markers
935 obsoleted 1 changesets
960 obsoleted 1 changesets
936 1 new obsolescence markers
961 1 new obsolescence markers
937 obsoleted 1 changesets
962 obsoleted 1 changesets
938 1 new obsolescence markers
963 1 new obsolescence markers
939 obsoleted 1 changesets
964 obsoleted 1 changesets
940 1 new obsolescence markers
965 1 new obsolescence markers
941 obsoleted 1 changesets
966 obsoleted 1 changesets
942 1 new obsolescence markers
967 1 new obsolescence markers
943 obsoleted 1 changesets
968 obsoleted 1 changesets
944 1 new obsolescence markers
969 1 new obsolescence markers
945 obsoleted 1 changesets
970 obsoleted 1 changesets
946 1 new obsolescence markers
971 1 new obsolescence markers
947 obsoleted 1 changesets
972 obsoleted 1 changesets
948 1 new obsolescence markers
973 1 new obsolescence markers
949 obsoleted 1 changesets
974 obsoleted 1 changesets
950 1 new obsolescence markers
975 1 new obsolescence markers
951 obsoleted 1 changesets
976 obsoleted 1 changesets
952 1 new obsolescence markers
977 1 new obsolescence markers
953 obsoleted 1 changesets
978 obsoleted 1 changesets
954 1 new obsolescence markers
979 1 new obsolescence markers
955 obsoleted 1 changesets
980 obsoleted 1 changesets
956 1 new obsolescence markers
981 1 new obsolescence markers
957 obsoleted 1 changesets
982 obsoleted 1 changesets
958 1 new obsolescence markers
983 1 new obsolescence markers
959 obsoleted 1 changesets
984 obsoleted 1 changesets
960 1 new obsolescence markers
985 1 new obsolescence markers
961 obsoleted 1 changesets
986 obsoleted 1 changesets
962 1 new obsolescence markers
987 1 new obsolescence markers
963 obsoleted 1 changesets
988 obsoleted 1 changesets
964 1 new obsolescence markers
989 1 new obsolescence markers
965 obsoleted 1 changesets
990 obsoleted 1 changesets
966 1 new obsolescence markers
991 1 new obsolescence markers
967 obsoleted 1 changesets
992 obsoleted 1 changesets
968 1 new obsolescence markers
993 1 new obsolescence markers
969 obsoleted 1 changesets
994 obsoleted 1 changesets
970 1 new obsolescence markers
995 1 new obsolescence markers
971 obsoleted 1 changesets
996 obsoleted 1 changesets
972 1 new obsolescence markers
997 1 new obsolescence markers
973 obsoleted 1 changesets
998 obsoleted 1 changesets
974 1 new obsolescence markers
999 1 new obsolescence markers
975 obsoleted 1 changesets
1000 obsoleted 1 changesets
976 1 new obsolescence markers
1001 1 new obsolescence markers
977 obsoleted 1 changesets
1002 obsoleted 1 changesets
978 1 new obsolescence markers
1003 1 new obsolescence markers
979 obsoleted 1 changesets
1004 obsoleted 1 changesets
980 1 new obsolescence markers
1005 1 new obsolescence markers
981 obsoleted 1 changesets
1006 obsoleted 1 changesets
982 1 new obsolescence markers
1007 1 new obsolescence markers
983 obsoleted 1 changesets
1008 obsoleted 1 changesets
984 1 new obsolescence markers
1009 1 new obsolescence markers
985 obsoleted 1 changesets
1010 obsoleted 1 changesets
986 1 new obsolescence markers
1011 1 new obsolescence markers
987 obsoleted 1 changesets
1012 obsoleted 1 changesets
988 1 new obsolescence markers
1013 1 new obsolescence markers
989 obsoleted 1 changesets
1014 obsoleted 1 changesets
990 1 new obsolescence markers
1015 1 new obsolescence markers
991 obsoleted 1 changesets
1016 obsoleted 1 changesets
992 1 new obsolescence markers
1017 1 new obsolescence markers
993 obsoleted 1 changesets
1018 obsoleted 1 changesets
994 1 new obsolescence markers
1019 1 new obsolescence markers
995 obsoleted 1 changesets
1020 obsoleted 1 changesets
996 1 new obsolescence markers
1021 1 new obsolescence markers
997 obsoleted 1 changesets
1022 obsoleted 1 changesets
998 1 new obsolescence markers
1023 1 new obsolescence markers
999 obsoleted 1 changesets
1024 obsoleted 1 changesets
1000 1 new obsolescence markers
1025 1 new obsolescence markers
1001 obsoleted 1 changesets
1026 obsoleted 1 changesets
1002 1 new obsolescence markers
1027 1 new obsolescence markers
1003 obsoleted 1 changesets
1028 obsoleted 1 changesets
1004 1 new obsolescence markers
1029 1 new obsolescence markers
1005 obsoleted 1 changesets
1030 obsoleted 1 changesets
1006 1 new obsolescence markers
1031 1 new obsolescence markers
1007 obsoleted 1 changesets
1032 obsoleted 1 changesets
1008 1 new obsolescence markers
1033 1 new obsolescence markers
1009 obsoleted 1 changesets
1034 obsoleted 1 changesets
1010 $ hg up tip
1035 $ hg up tip
1011 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1036 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1012
1037
1013 #if serve
1038 #if serve
1014
1039
1015 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1040 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1016 $ cat hg.pid >> $DAEMON_PIDS
1041 $ cat hg.pid >> $DAEMON_PIDS
1017
1042
1018 check changelog view
1043 check changelog view
1019
1044
1020 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
1045 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
1021 200 Script output follows
1046 200 Script output follows
1022
1047
1023 check graph view
1048 check graph view
1024
1049
1025 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
1050 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
1026 200 Script output follows
1051 200 Script output follows
1027
1052
1028 check filelog view
1053 check filelog view
1029
1054
1030 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
1055 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
1031 200 Script output follows
1056 200 Script output follows
1032
1057
1033 check filelog view for hidden commits (obsolete ones are hidden here)
1058 check filelog view for hidden commits (obsolete ones are hidden here)
1034
1059
1035 $ get-with-headers.py localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar' | grep obsolete
1060 $ get-with-headers.py localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar' | grep obsolete
1036 [1]
1061 [1]
1037
1062
1038 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
1063 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
1039 200 Script output follows
1064 200 Script output follows
1040 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
1065 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
1041 404 Not Found
1066 404 Not Found
1042 [1]
1067 [1]
1043
1068
1044 check that web.view config option:
1069 check that web.view config option:
1045
1070
1046 $ killdaemons.py hg.pid
1071 $ killdaemons.py hg.pid
1047 $ cat >> .hg/hgrc << EOF
1072 $ cat >> .hg/hgrc << EOF
1048 > [web]
1073 > [web]
1049 > view=all
1074 > view=all
1050 > EOF
1075 > EOF
1051 $ wait
1076 $ wait
1052 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1077 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1053 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
1078 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
1054 200 Script output follows
1079 200 Script output follows
1055 $ killdaemons.py hg.pid
1080 $ killdaemons.py hg.pid
1056
1081
1057 Checking _enable=False warning if obsolete marker exists
1082 Checking _enable=False warning if obsolete marker exists
1058
1083
1059 $ echo '[experimental]' >> $HGRCPATH
1084 $ echo '[experimental]' >> $HGRCPATH
1060 $ echo "evolution=" >> $HGRCPATH
1085 $ echo "evolution=" >> $HGRCPATH
1061 $ hg log -r tip
1086 $ hg log -r tip
1062 68:c15e9edfca13 (draft) [tip ] add celestine
1087 68:c15e9edfca13 (draft) [tip ] add celestine
1063
1088
1064 reenable for later test
1089 reenable for later test
1065
1090
1066 $ echo '[experimental]' >> $HGRCPATH
1091 $ echo '[experimental]' >> $HGRCPATH
1067 $ echo "evolution.exchange=True" >> $HGRCPATH
1092 $ echo "evolution.exchange=True" >> $HGRCPATH
1068 $ echo "evolution.createmarkers=True" >> $HGRCPATH
1093 $ echo "evolution.createmarkers=True" >> $HGRCPATH
1069
1094
1070 $ rm access.log errors.log
1095 $ rm access.log errors.log
1071 #endif
1096 #endif
1072
1097
1073 Several troubles on the same changeset (create an unstable and bumped and content-divergent changeset)
1098 Several troubles on the same changeset (create an unstable and bumped and content-divergent changeset)
1074
1099
1075 $ hg debugobsolete `getid obsolete_e`
1100 $ hg debugobsolete `getid obsolete_e`
1076 1 new obsolescence markers
1101 1 new obsolescence markers
1077 obsoleted 1 changesets
1102 obsoleted 1 changesets
1078 2 new orphan changesets
1103 2 new orphan changesets
1079 $ hg debugobsolete `getid original_c` `getid babar`
1104 $ hg debugobsolete `getid original_c` `getid babar`
1080 1 new obsolescence markers
1105 1 new obsolescence markers
1081 1 new phase-divergent changesets
1106 1 new phase-divergent changesets
1082 2 new content-divergent changesets
1107 2 new content-divergent changesets
1083 $ hg log --config ui.logtemplate= -r 'phasedivergent() and orphan() and contentdivergent()'
1108 $ hg log --config ui.logtemplate= -r 'phasedivergent() and orphan() and contentdivergent()'
1084 changeset: 7:50c51b361e60
1109 changeset: 7:50c51b361e60
1085 user: test
1110 user: test
1086 date: Thu Jan 01 00:00:00 1970 +0000
1111 date: Thu Jan 01 00:00:00 1970 +0000
1087 instability: orphan, phase-divergent, content-divergent
1112 instability: orphan, phase-divergent, content-divergent
1088 summary: add babar
1113 summary: add babar
1089
1114
1090 test the "obsolete" templatekw
1115 test the "obsolete" templatekw
1091
1116
1092 $ hg log -r 'obsolete()'
1117 $ hg log -r 'obsolete()'
1093 6:3de5eca88c00 (draft *obsolete*) [ ] add obsolete_e [pruned]
1118 6:3de5eca88c00 (draft *obsolete*) [ ] add obsolete_e [pruned]
1094
1119
1095 test the "troubles" templatekw
1120 test the "troubles" templatekw
1096
1121
1097 $ hg log -r 'phasedivergent() and orphan()'
1122 $ hg log -r 'phasedivergent() and orphan()'
1098 7:50c51b361e60 (draft orphan phase-divergent content-divergent) [ ] add babar
1123 7:50c51b361e60 (draft orphan phase-divergent content-divergent) [ ] add babar
1099
1124
1100 test the default cmdline template
1125 test the default cmdline template
1101
1126
1102 $ hg log -T default -r 'phasedivergent()'
1127 $ hg log -T default -r 'phasedivergent()'
1103 changeset: 7:50c51b361e60
1128 changeset: 7:50c51b361e60
1104 user: test
1129 user: test
1105 date: Thu Jan 01 00:00:00 1970 +0000
1130 date: Thu Jan 01 00:00:00 1970 +0000
1106 instability: orphan, phase-divergent, content-divergent
1131 instability: orphan, phase-divergent, content-divergent
1107 summary: add babar
1132 summary: add babar
1108
1133
1109 $ hg log -T default -r 'obsolete()'
1134 $ hg log -T default -r 'obsolete()'
1110 changeset: 6:3de5eca88c00
1135 changeset: 6:3de5eca88c00
1111 parent: 3:6f9641995072
1136 parent: 3:6f9641995072
1112 user: test
1137 user: test
1113 date: Thu Jan 01 00:00:00 1970 +0000
1138 date: Thu Jan 01 00:00:00 1970 +0000
1114 obsolete: pruned
1139 obsolete: pruned
1115 summary: add obsolete_e
1140 summary: add obsolete_e
1116
1141
1117
1142
1118 test the obsolete labels
1143 test the obsolete labels
1119
1144
1120 $ hg log --config ui.logtemplate= --color=debug -r 'phasedivergent()'
1145 $ hg log --config ui.logtemplate= --color=debug -r 'phasedivergent()'
1121 [log.changeset changeset.draft changeset.unstable instability.orphan instability.phase-divergent instability.content-divergent|changeset: 7:50c51b361e60]
1146 [log.changeset changeset.draft changeset.unstable instability.orphan instability.phase-divergent instability.content-divergent|changeset: 7:50c51b361e60]
1122 [log.user|user: test]
1147 [log.user|user: test]
1123 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1148 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1124 [log.instability|instability: orphan, phase-divergent, content-divergent]
1149 [log.instability|instability: orphan, phase-divergent, content-divergent]
1125 [log.summary|summary: add babar]
1150 [log.summary|summary: add babar]
1126
1151
1127
1152
1128 $ hg log -T default -r 'phasedivergent()' --color=debug
1153 $ hg log -T default -r 'phasedivergent()' --color=debug
1129 [log.changeset changeset.draft changeset.unstable instability.orphan instability.phase-divergent instability.content-divergent|changeset: 7:50c51b361e60]
1154 [log.changeset changeset.draft changeset.unstable instability.orphan instability.phase-divergent instability.content-divergent|changeset: 7:50c51b361e60]
1130 [log.user|user: test]
1155 [log.user|user: test]
1131 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1156 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1132 [log.instability|instability: orphan, phase-divergent, content-divergent]
1157 [log.instability|instability: orphan, phase-divergent, content-divergent]
1133 [log.summary|summary: add babar]
1158 [log.summary|summary: add babar]
1134
1159
1135
1160
1136 $ hg log --config ui.logtemplate= --color=debug -r "obsolete()"
1161 $ hg log --config ui.logtemplate= --color=debug -r "obsolete()"
1137 [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
1162 [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
1138 [log.parent changeset.draft|parent: 3:6f9641995072]
1163 [log.parent changeset.draft|parent: 3:6f9641995072]
1139 [log.user|user: test]
1164 [log.user|user: test]
1140 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1165 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1141 [log.obsfate|obsolete: pruned]
1166 [log.obsfate|obsolete: pruned]
1142 [log.summary|summary: add obsolete_e]
1167 [log.summary|summary: add obsolete_e]
1143
1168
1144
1169
1145 $ hg log -T default -r 'obsolete()' --color=debug
1170 $ hg log -T default -r 'obsolete()' --color=debug
1146 [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
1171 [log.changeset changeset.draft changeset.obsolete|changeset: 6:3de5eca88c00]
1147 [log.parent changeset.draft|parent: 3:6f9641995072]
1172 [log.parent changeset.draft|parent: 3:6f9641995072]
1148 [log.user|user: test]
1173 [log.user|user: test]
1149 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1174 [log.date|date: Thu Jan 01 00:00:00 1970 +0000]
1150 [log.obsfate|obsolete: pruned]
1175 [log.obsfate|obsolete: pruned]
1151 [log.summary|summary: add obsolete_e]
1176 [log.summary|summary: add obsolete_e]
1152
1177
1153
1178
1154 test summary output
1179 test summary output
1155
1180
1156 $ hg up -r 'phasedivergent() and orphan()'
1181 $ hg up -r 'phasedivergent() and orphan()'
1157 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1182 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1158 $ hg summary
1183 $ hg summary
1159 parent: 7:50c51b361e60 (orphan, phase-divergent, content-divergent)
1184 parent: 7:50c51b361e60 (orphan, phase-divergent, content-divergent)
1160 add babar
1185 add babar
1161 branch: default
1186 branch: default
1162 commit: (clean)
1187 commit: (clean)
1163 update: 2 new changesets (update)
1188 update: 2 new changesets (update)
1164 phases: 4 draft
1189 phases: 4 draft
1165 orphan: 2 changesets
1190 orphan: 2 changesets
1166 content-divergent: 2 changesets
1191 content-divergent: 2 changesets
1167 phase-divergent: 1 changesets
1192 phase-divergent: 1 changesets
1168 $ hg up -r 'obsolete()'
1193 $ hg up -r 'obsolete()'
1169 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1194 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1170 $ hg summary
1195 $ hg summary
1171 parent: 6:3de5eca88c00 (obsolete)
1196 parent: 6:3de5eca88c00 (obsolete)
1172 add obsolete_e
1197 add obsolete_e
1173 branch: default
1198 branch: default
1174 commit: (clean)
1199 commit: (clean)
1175 update: 3 new changesets (update)
1200 update: 3 new changesets (update)
1176 phases: 4 draft
1201 phases: 4 draft
1177 orphan: 2 changesets
1202 orphan: 2 changesets
1178 content-divergent: 2 changesets
1203 content-divergent: 2 changesets
1179 phase-divergent: 1 changesets
1204 phase-divergent: 1 changesets
1180
1205
1181 test debugwhyunstable output
1206 test debugwhyunstable output
1182
1207
1183 $ hg debugwhyunstable 50c51b361e60
1208 $ hg debugwhyunstable 50c51b361e60
1184 orphan: obsolete parent 3de5eca88c00aa039da7399a220f4a5221faa585
1209 orphan: obsolete parent 3de5eca88c00aa039da7399a220f4a5221faa585
1185 phase-divergent: immutable predecessor 245bde4270cd1072a27757984f9cda8ba26f08ca
1210 phase-divergent: immutable predecessor 245bde4270cd1072a27757984f9cda8ba26f08ca
1186 content-divergent: 6f96419950729f3671185b847352890f074f7557 (draft) predecessor 245bde4270cd1072a27757984f9cda8ba26f08ca
1211 content-divergent: 6f96419950729f3671185b847352890f074f7557 (draft) predecessor 245bde4270cd1072a27757984f9cda8ba26f08ca
1187
1212
1188 test whyunstable template keyword
1213 test whyunstable template keyword
1189
1214
1190 $ hg log -r 50c51b361e60 -T '{whyunstable}\n'
1215 $ hg log -r 50c51b361e60 -T '{whyunstable}\n'
1191 orphan: obsolete parent 3de5eca88c00
1216 orphan: obsolete parent 3de5eca88c00
1192 phase-divergent: immutable predecessor 245bde4270cd
1217 phase-divergent: immutable predecessor 245bde4270cd
1193 content-divergent: 3:6f9641995072 (draft) predecessor 245bde4270cd
1218 content-divergent: 3:6f9641995072 (draft) predecessor 245bde4270cd
1194 $ hg log -r 50c51b361e60 -T '{whyunstable % "{instability}: {reason} {node|shortest}\n"}'
1219 $ hg log -r 50c51b361e60 -T '{whyunstable % "{instability}: {reason} {node|shortest}\n"}'
1195 orphan: obsolete parent 3de5
1220 orphan: obsolete parent 3de5
1196 phase-divergent: immutable predecessor 245b
1221 phase-divergent: immutable predecessor 245b
1197 content-divergent: predecessor 245b
1222 content-divergent: predecessor 245b
1198
1223
1224 $ hg push ../tmpf -r 50c51b361e60
1225 pushing to ../tmpf
1226 searching for changes
1227 abort: push includes unstable changesets:
1228 50c51b361e60 (orphan, phase-divergent, content-divergent)
1229 [255]
1230
1231
1199 #if serve
1232 #if serve
1200
1233
1201 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1234 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1202 $ cat hg.pid >> $DAEMON_PIDS
1235 $ cat hg.pid >> $DAEMON_PIDS
1203
1236
1204 check obsolete changeset
1237 check obsolete changeset
1205
1238
1206 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=paper' | grep '<span class="obsolete">'
1239 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=paper' | grep '<span class="obsolete">'
1207 <span class="phase">draft</span> <span class="obsolete">obsolete</span>
1240 <span class="phase">draft</span> <span class="obsolete">obsolete</span>
1208 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=coal' | grep '<span class="obsolete">'
1241 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=coal' | grep '<span class="obsolete">'
1209 <span class="phase">draft</span> <span class="obsolete">obsolete</span>
1242 <span class="phase">draft</span> <span class="obsolete">obsolete</span>
1210 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=gitweb' | grep '<span class="logtags">'
1243 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=gitweb' | grep '<span class="logtags">'
1211 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
1244 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
1212 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=monoblue' | grep '<span class="logtags">'
1245 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=monoblue' | grep '<span class="logtags">'
1213 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
1246 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="obsoletetag" title="obsolete">obsolete</span> </span>
1214 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=spartan' | grep 'class="obsolete"'
1247 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(obsolete())&style=spartan' | grep 'class="obsolete"'
1215 <th class="obsolete">obsolete:</th>
1248 <th class="obsolete">obsolete:</th>
1216 <td class="obsolete">pruned by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
1249 <td class="obsolete">pruned by &#116;&#101;&#115;&#116; <span class="age">Thu, 01 Jan 1970 00:00:00 +0000</span></td>
1217
1250
1218 check changeset with instabilities
1251 check changeset with instabilities
1219
1252
1220 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=paper' | grep '<span class="instability">'
1253 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=paper' | grep '<span class="instability">'
1221 <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span> <span class="instability">content-divergent</span>
1254 <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span> <span class="instability">content-divergent</span>
1222 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=coal' | grep '<span class="instability">'
1255 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=coal' | grep '<span class="instability">'
1223 <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span> <span class="instability">content-divergent</span>
1256 <span class="phase">draft</span> <span class="instability">orphan</span> <span class="instability">phase-divergent</span> <span class="instability">content-divergent</span>
1224 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=gitweb' | grep '<span class="logtags">'
1257 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=gitweb' | grep '<span class="logtags">'
1225 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> <span class="instabilitytag" title="content-divergent">content-divergent</span> </span>
1258 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> <span class="instabilitytag" title="content-divergent">content-divergent</span> </span>
1226 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=monoblue' | grep '<span class="logtags">'
1259 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=monoblue' | grep '<span class="logtags">'
1227 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> <span class="instabilitytag" title="content-divergent">content-divergent</span> </span>
1260 <span class="logtags"><span class="phasetag" title="draft">draft</span> <span class="instabilitytag" title="orphan">orphan</span> <span class="instabilitytag" title="phase-divergent">phase-divergent</span> <span class="instabilitytag" title="content-divergent">content-divergent</span> </span>
1228 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=spartan' | grep 'class="unstable"'
1261 $ get-with-headers.py localhost:$HGPORT 'log?rev=first(phasedivergent())&style=spartan' | grep 'class="unstable"'
1229 <th class="unstable">unstable:</th>
1262 <th class="unstable">unstable:</th>
1230 <td class="unstable">orphan: obsolete parent <a href="/rev/3de5eca88c00?style=spartan">3de5eca88c00</a></td>
1263 <td class="unstable">orphan: obsolete parent <a href="/rev/3de5eca88c00?style=spartan">3de5eca88c00</a></td>
1231 <th class="unstable">unstable:</th>
1264 <th class="unstable">unstable:</th>
1232 <td class="unstable">phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1265 <td class="unstable">phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1233 <th class="unstable">unstable:</th>
1266 <th class="unstable">unstable:</th>
1234 <td class="unstable">content-divergent: <a href="/rev/6f9641995072?style=spartan">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1267 <td class="unstable">content-divergent: <a href="/rev/6f9641995072?style=spartan">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1235
1268
1236 check explanation for an orphan, phase-divergent and content-divergent changeset
1269 check explanation for an orphan, phase-divergent and content-divergent changeset
1237
1270
1238 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=paper' | egrep '(orphan|phase-divergent|content-divergent):'
1271 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=paper' | egrep '(orphan|phase-divergent|content-divergent):'
1239 <td>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=paper">3de5eca88c00</a><br>
1272 <td>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=paper">3de5eca88c00</a><br>
1240 phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=paper">245bde4270cd</a><br>
1273 phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=paper">245bde4270cd</a><br>
1241 content-divergent: <a href="/rev/6f9641995072?style=paper">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=paper">245bde4270cd</a></td>
1274 content-divergent: <a href="/rev/6f9641995072?style=paper">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=paper">245bde4270cd</a></td>
1242 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=coal' | egrep '(orphan|phase-divergent|content-divergent):'
1275 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=coal' | egrep '(orphan|phase-divergent|content-divergent):'
1243 <td>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=coal">3de5eca88c00</a><br>
1276 <td>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=coal">3de5eca88c00</a><br>
1244 phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=coal">245bde4270cd</a><br>
1277 phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=coal">245bde4270cd</a><br>
1245 content-divergent: <a href="/rev/6f9641995072?style=coal">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=coal">245bde4270cd</a></td>
1278 content-divergent: <a href="/rev/6f9641995072?style=coal">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=coal">245bde4270cd</a></td>
1246 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=gitweb' | egrep '(orphan|phase-divergent|content-divergent):'
1279 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=gitweb' | egrep '(orphan|phase-divergent|content-divergent):'
1247 <td>orphan: obsolete parent <a class="list" href="/rev/3de5eca88c00?style=gitweb">3de5eca88c00</a></td>
1280 <td>orphan: obsolete parent <a class="list" href="/rev/3de5eca88c00?style=gitweb">3de5eca88c00</a></td>
1248 <td>phase-divergent: immutable predecessor <a class="list" href="/rev/245bde4270cd?style=gitweb">245bde4270cd</a></td>
1281 <td>phase-divergent: immutable predecessor <a class="list" href="/rev/245bde4270cd?style=gitweb">245bde4270cd</a></td>
1249 <td>content-divergent: <a class="list" href="/rev/6f9641995072?style=gitweb">6f9641995072</a> (draft) predecessor <a class="list" href="/rev/245bde4270cd?style=gitweb">245bde4270cd</a></td>
1282 <td>content-divergent: <a class="list" href="/rev/6f9641995072?style=gitweb">6f9641995072</a> (draft) predecessor <a class="list" href="/rev/245bde4270cd?style=gitweb">245bde4270cd</a></td>
1250 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=monoblue' | egrep '(orphan|phase-divergent|content-divergent):'
1283 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=monoblue' | egrep '(orphan|phase-divergent|content-divergent):'
1251 <dd>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=monoblue">3de5eca88c00</a></dd>
1284 <dd>orphan: obsolete parent <a href="/rev/3de5eca88c00?style=monoblue">3de5eca88c00</a></dd>
1252 <dd>phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=monoblue">245bde4270cd</a></dd>
1285 <dd>phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=monoblue">245bde4270cd</a></dd>
1253 <dd>content-divergent: <a href="/rev/6f9641995072?style=monoblue">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=monoblue">245bde4270cd</a></dd>
1286 <dd>content-divergent: <a href="/rev/6f9641995072?style=monoblue">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=monoblue">245bde4270cd</a></dd>
1254 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=spartan' | egrep '(orphan|phase-divergent|content-divergent):'
1287 $ get-with-headers.py localhost:$HGPORT 'rev/50c51b361e60?style=spartan' | egrep '(orphan|phase-divergent|content-divergent):'
1255 <td class="unstable">orphan: obsolete parent <a href="/rev/3de5eca88c00?style=spartan">3de5eca88c00</a></td>
1288 <td class="unstable">orphan: obsolete parent <a href="/rev/3de5eca88c00?style=spartan">3de5eca88c00</a></td>
1256 <td class="unstable">phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1289 <td class="unstable">phase-divergent: immutable predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1257 <td class="unstable">content-divergent: <a href="/rev/6f9641995072?style=spartan">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1290 <td class="unstable">content-divergent: <a href="/rev/6f9641995072?style=spartan">6f9641995072</a> (draft) predecessor <a href="/rev/245bde4270cd?style=spartan">245bde4270cd</a></td>
1258
1291
1259 $ killdaemons.py
1292 $ killdaemons.py
1260
1293
1261 $ rm hg.pid access.log errors.log
1294 $ rm hg.pid access.log errors.log
1262
1295
1263 #endif
1296 #endif
1264
1297
1265 Test incoming/outcoming with changesets obsoleted remotely, known locally
1298 Test incoming/outcoming with changesets obsoleted remotely, known locally
1266 ===============================================================================
1299 ===============================================================================
1267
1300
1268 This test issue 3805
1301 This test issue 3805
1269
1302
1270 $ hg init repo-issue3805
1303 $ hg init repo-issue3805
1271 $ cd repo-issue3805
1304 $ cd repo-issue3805
1272 $ echo "base" > base
1305 $ echo "base" > base
1273 $ hg ci -Am "base"
1306 $ hg ci -Am "base"
1274 adding base
1307 adding base
1275 $ echo "foo" > foo
1308 $ echo "foo" > foo
1276 $ hg ci -Am "A"
1309 $ hg ci -Am "A"
1277 adding foo
1310 adding foo
1278 $ hg clone . ../other-issue3805
1311 $ hg clone . ../other-issue3805
1279 updating to branch default
1312 updating to branch default
1280 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1313 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
1281 $ echo "bar" >> foo
1314 $ echo "bar" >> foo
1282 $ hg ci --amend
1315 $ hg ci --amend
1283 $ cd ../other-issue3805
1316 $ cd ../other-issue3805
1284 $ hg log -G
1317 $ hg log -G
1285 @ 1:29f0c6921ddd (draft) [tip ] A
1318 @ 1:29f0c6921ddd (draft) [tip ] A
1286 |
1319 |
1287 o 0:d20a80d4def3 (draft) [ ] base
1320 o 0:d20a80d4def3 (draft) [ ] base
1288
1321
1289 $ hg log -G -R ../repo-issue3805
1322 $ hg log -G -R ../repo-issue3805
1290 @ 2:323a9c3ddd91 (draft) [tip ] A
1323 @ 2:323a9c3ddd91 (draft) [tip ] A
1291 |
1324 |
1292 o 0:d20a80d4def3 (draft) [ ] base
1325 o 0:d20a80d4def3 (draft) [ ] base
1293
1326
1294 $ hg incoming
1327 $ hg incoming
1295 comparing with $TESTTMP/tmpe/repo-issue3805
1328 comparing with $TESTTMP/tmpe/repo-issue3805
1296 searching for changes
1329 searching for changes
1297 2:323a9c3ddd91 (draft) [tip ] A
1330 2:323a9c3ddd91 (draft) [tip ] A
1298 $ hg incoming --bundle ../issue3805.hg
1331 $ hg incoming --bundle ../issue3805.hg
1299 comparing with $TESTTMP/tmpe/repo-issue3805
1332 comparing with $TESTTMP/tmpe/repo-issue3805
1300 searching for changes
1333 searching for changes
1301 2:323a9c3ddd91 (draft) [tip ] A
1334 2:323a9c3ddd91 (draft) [tip ] A
1302 $ hg outgoing
1335 $ hg outgoing
1303 comparing with $TESTTMP/tmpe/repo-issue3805
1336 comparing with $TESTTMP/tmpe/repo-issue3805
1304 searching for changes
1337 searching for changes
1305 1:29f0c6921ddd (draft) [tip ] A
1338 1:29f0c6921ddd (draft) [tip ] A
1306
1339
1307 #if serve
1340 #if serve
1308
1341
1309 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1342 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1310 $ cat hg.pid >> $DAEMON_PIDS
1343 $ cat hg.pid >> $DAEMON_PIDS
1311
1344
1312 $ hg incoming http://localhost:$HGPORT
1345 $ hg incoming http://localhost:$HGPORT
1313 comparing with http://localhost:$HGPORT/
1346 comparing with http://localhost:$HGPORT/
1314 searching for changes
1347 searching for changes
1315 2:323a9c3ddd91 (draft) [tip ] A
1348 2:323a9c3ddd91 (draft) [tip ] A
1316 $ hg outgoing http://localhost:$HGPORT
1349 $ hg outgoing http://localhost:$HGPORT
1317 comparing with http://localhost:$HGPORT/
1350 comparing with http://localhost:$HGPORT/
1318 searching for changes
1351 searching for changes
1319 1:29f0c6921ddd (draft) [tip ] A
1352 1:29f0c6921ddd (draft) [tip ] A
1320
1353
1321 $ killdaemons.py
1354 $ killdaemons.py
1322
1355
1323 #endif
1356 #endif
1324
1357
1325 This test issue 3814
1358 This test issue 3814
1326
1359
1327 (nothing to push but locally hidden changeset)
1360 (nothing to push but locally hidden changeset)
1328
1361
1329 $ cd ..
1362 $ cd ..
1330 $ hg init repo-issue3814
1363 $ hg init repo-issue3814
1331 $ cd repo-issue3805
1364 $ cd repo-issue3805
1332 $ hg push -r 323a9c3ddd91 ../repo-issue3814
1365 $ hg push -r 323a9c3ddd91 ../repo-issue3814
1333 pushing to ../repo-issue3814
1366 pushing to ../repo-issue3814
1334 searching for changes
1367 searching for changes
1335 adding changesets
1368 adding changesets
1336 adding manifests
1369 adding manifests
1337 adding file changes
1370 adding file changes
1338 added 2 changesets with 2 changes to 2 files
1371 added 2 changesets with 2 changes to 2 files
1339 1 new obsolescence markers
1372 1 new obsolescence markers
1340 $ hg out ../repo-issue3814
1373 $ hg out ../repo-issue3814
1341 comparing with ../repo-issue3814
1374 comparing with ../repo-issue3814
1342 searching for changes
1375 searching for changes
1343 no changes found
1376 no changes found
1344 [1]
1377 [1]
1345
1378
1346 Test that a local tag blocks a changeset from being hidden
1379 Test that a local tag blocks a changeset from being hidden
1347
1380
1348 $ hg tag -l visible -r 1 --hidden
1381 $ hg tag -l visible -r 1 --hidden
1349 $ hg log -G
1382 $ hg log -G
1350 @ 2:323a9c3ddd91 (draft) [tip ] A
1383 @ 2:323a9c3ddd91 (draft) [tip ] A
1351 |
1384 |
1352 | x 1:29f0c6921ddd (draft *obsolete*) [visible ] A [rewritten using amend as 2:323a9c3ddd91]
1385 | x 1:29f0c6921ddd (draft *obsolete*) [visible ] A [rewritten using amend as 2:323a9c3ddd91]
1353 |/
1386 |/
1354 o 0:d20a80d4def3 (draft) [ ] base
1387 o 0:d20a80d4def3 (draft) [ ] base
1355
1388
1356 Test that removing a local tag does not cause some commands to fail
1389 Test that removing a local tag does not cause some commands to fail
1357
1390
1358 $ hg tag -l -r tip tiptag
1391 $ hg tag -l -r tip tiptag
1359 $ hg tags
1392 $ hg tags
1360 tiptag 2:323a9c3ddd91
1393 tiptag 2:323a9c3ddd91
1361 tip 2:323a9c3ddd91
1394 tip 2:323a9c3ddd91
1362 visible 1:29f0c6921ddd
1395 visible 1:29f0c6921ddd
1363 $ hg --config extensions.strip= strip -r tip --no-backup
1396 $ hg --config extensions.strip= strip -r tip --no-backup
1364 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1397 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1365 $ hg tags
1398 $ hg tags
1366 visible 1:29f0c6921ddd
1399 visible 1:29f0c6921ddd
1367 tip 1:29f0c6921ddd
1400 tip 1:29f0c6921ddd
1368
1401
1369 Test bundle overlay onto hidden revision
1402 Test bundle overlay onto hidden revision
1370
1403
1371 $ cd ..
1404 $ cd ..
1372 $ hg init repo-bundleoverlay
1405 $ hg init repo-bundleoverlay
1373 $ cd repo-bundleoverlay
1406 $ cd repo-bundleoverlay
1374 $ echo "A" > foo
1407 $ echo "A" > foo
1375 $ hg ci -Am "A"
1408 $ hg ci -Am "A"
1376 adding foo
1409 adding foo
1377 $ echo "B" >> foo
1410 $ echo "B" >> foo
1378 $ hg ci -m "B"
1411 $ hg ci -m "B"
1379 $ hg up 0
1412 $ hg up 0
1380 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1413 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1381 $ echo "C" >> foo
1414 $ echo "C" >> foo
1382 $ hg ci -m "C"
1415 $ hg ci -m "C"
1383 created new head
1416 created new head
1384 $ hg log -G
1417 $ hg log -G
1385 @ 2:c186d7714947 (draft) [tip ] C
1418 @ 2:c186d7714947 (draft) [tip ] C
1386 |
1419 |
1387 | o 1:44526ebb0f98 (draft) [ ] B
1420 | o 1:44526ebb0f98 (draft) [ ] B
1388 |/
1421 |/
1389 o 0:4b34ecfb0d56 (draft) [ ] A
1422 o 0:4b34ecfb0d56 (draft) [ ] A
1390
1423
1391
1424
1392 $ hg clone -r1 . ../other-bundleoverlay
1425 $ hg clone -r1 . ../other-bundleoverlay
1393 adding changesets
1426 adding changesets
1394 adding manifests
1427 adding manifests
1395 adding file changes
1428 adding file changes
1396 added 2 changesets with 2 changes to 1 files
1429 added 2 changesets with 2 changes to 1 files
1397 new changesets 4b34ecfb0d56:44526ebb0f98 (2 drafts)
1430 new changesets 4b34ecfb0d56:44526ebb0f98 (2 drafts)
1398 updating to branch default
1431 updating to branch default
1399 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1432 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1400 $ cd ../other-bundleoverlay
1433 $ cd ../other-bundleoverlay
1401 $ echo "B+" >> foo
1434 $ echo "B+" >> foo
1402 $ hg ci --amend -m "B+"
1435 $ hg ci --amend -m "B+"
1403 $ hg log -G --hidden
1436 $ hg log -G --hidden
1404 @ 2:b7d587542d40 (draft) [tip ] B+
1437 @ 2:b7d587542d40 (draft) [tip ] B+
1405 |
1438 |
1406 | x 1:44526ebb0f98 (draft *obsolete*) [ ] B [rewritten using amend as 2:b7d587542d40]
1439 | x 1:44526ebb0f98 (draft *obsolete*) [ ] B [rewritten using amend as 2:b7d587542d40]
1407 |/
1440 |/
1408 o 0:4b34ecfb0d56 (draft) [ ] A
1441 o 0:4b34ecfb0d56 (draft) [ ] A
1409
1442
1410
1443
1411 #if repobundlerepo
1444 #if repobundlerepo
1412 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
1445 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
1413 comparing with ../repo-bundleoverlay
1446 comparing with ../repo-bundleoverlay
1414 searching for changes
1447 searching for changes
1415 1:44526ebb0f98 (draft) [ ] B
1448 1:44526ebb0f98 (draft) [ ] B
1416 2:c186d7714947 (draft) [tip ] C
1449 2:c186d7714947 (draft) [tip ] C
1417 $ hg log -G -R ../bundleoverlay.hg
1450 $ hg log -G -R ../bundleoverlay.hg
1418 o 3:c186d7714947 (draft) [tip ] C
1451 o 3:c186d7714947 (draft) [tip ] C
1419 |
1452 |
1420 | @ 2:b7d587542d40 (draft) [ ] B+
1453 | @ 2:b7d587542d40 (draft) [ ] B+
1421 |/
1454 |/
1422 o 0:4b34ecfb0d56 (draft) [ ] A
1455 o 0:4b34ecfb0d56 (draft) [ ] A
1423
1456
1424 #endif
1457 #endif
1425
1458
1426 #if serve
1459 #if serve
1427
1460
1428 Test issue 4506
1461 Test issue 4506
1429
1462
1430 $ cd ..
1463 $ cd ..
1431 $ hg init repo-issue4506
1464 $ hg init repo-issue4506
1432 $ cd repo-issue4506
1465 $ cd repo-issue4506
1433 $ echo "0" > foo
1466 $ echo "0" > foo
1434 $ hg add foo
1467 $ hg add foo
1435 $ hg ci -m "content-0"
1468 $ hg ci -m "content-0"
1436
1469
1437 $ hg up null
1470 $ hg up null
1438 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1471 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1439 $ echo "1" > bar
1472 $ echo "1" > bar
1440 $ hg add bar
1473 $ hg add bar
1441 $ hg ci -m "content-1"
1474 $ hg ci -m "content-1"
1442 created new head
1475 created new head
1443 $ hg up 0
1476 $ hg up 0
1444 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1477 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1445 $ hg graft 1
1478 $ hg graft 1
1446 grafting 1:1c9eddb02162 "content-1" (tip)
1479 grafting 1:1c9eddb02162 "content-1" (tip)
1447
1480
1448 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
1481 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
1449 1 new obsolescence markers
1482 1 new obsolescence markers
1450 obsoleted 1 changesets
1483 obsoleted 1 changesets
1451
1484
1452 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1485 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1453 $ cat hg.pid >> $DAEMON_PIDS
1486 $ cat hg.pid >> $DAEMON_PIDS
1454
1487
1455 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
1488 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
1456 404 Not Found
1489 404 Not Found
1457 [1]
1490 [1]
1458 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
1491 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
1459 200 Script output follows
1492 200 Script output follows
1460 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
1493 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
1461 200 Script output follows
1494 200 Script output follows
1462
1495
1463 $ killdaemons.py
1496 $ killdaemons.py
1464
1497
1465 #endif
1498 #endif
1466
1499
1467 Test heads computation on pending index changes with obsolescence markers
1500 Test heads computation on pending index changes with obsolescence markers
1468 $ cd ..
1501 $ cd ..
1469 $ cat >$TESTTMP/test_extension.py << EOF
1502 $ cat >$TESTTMP/test_extension.py << EOF
1470 > from __future__ import absolute_import
1503 > from __future__ import absolute_import
1471 > from mercurial.i18n import _
1504 > from mercurial.i18n import _
1472 > from mercurial import cmdutil, pycompat, registrar
1505 > from mercurial import cmdutil, pycompat, registrar
1473 > from mercurial.utils import stringutil
1506 > from mercurial.utils import stringutil
1474 >
1507 >
1475 > cmdtable = {}
1508 > cmdtable = {}
1476 > command = registrar.command(cmdtable)
1509 > command = registrar.command(cmdtable)
1477 > @command(b"amendtransient",[], _(b'hg amendtransient [rev]'))
1510 > @command(b"amendtransient",[], _(b'hg amendtransient [rev]'))
1478 > def amend(ui, repo, *pats, **opts):
1511 > def amend(ui, repo, *pats, **opts):
1479 > opts = pycompat.byteskwargs(opts)
1512 > opts = pycompat.byteskwargs(opts)
1480 > opts[b'message'] = b'Test'
1513 > opts[b'message'] = b'Test'
1481 > opts[b'logfile'] = None
1514 > opts[b'logfile'] = None
1482 > cmdutil.amend(ui, repo, repo[b'.'], {}, pats, opts)
1515 > cmdutil.amend(ui, repo, repo[b'.'], {}, pats, opts)
1483 > ui.write(b'%s\n' % stringutil.pprint(repo.changelog.headrevs()))
1516 > ui.write(b'%s\n' % stringutil.pprint(repo.changelog.headrevs()))
1484 > EOF
1517 > EOF
1485 $ cat >> $HGRCPATH << EOF
1518 $ cat >> $HGRCPATH << EOF
1486 > [extensions]
1519 > [extensions]
1487 > testextension=$TESTTMP/test_extension.py
1520 > testextension=$TESTTMP/test_extension.py
1488 > EOF
1521 > EOF
1489 $ hg init repo-issue-nativerevs-pending-changes
1522 $ hg init repo-issue-nativerevs-pending-changes
1490 $ cd repo-issue-nativerevs-pending-changes
1523 $ cd repo-issue-nativerevs-pending-changes
1491 $ mkcommit a
1524 $ mkcommit a
1492 $ mkcommit b
1525 $ mkcommit b
1493 $ hg up ".^"
1526 $ hg up ".^"
1494 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1527 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1495 $ echo aa > a
1528 $ echo aa > a
1496 $ hg amendtransient
1529 $ hg amendtransient
1497 1 new orphan changesets
1530 1 new orphan changesets
1498 [1, 2]
1531 [1, 2]
1499
1532
1500 Test cache consistency for the visible filter
1533 Test cache consistency for the visible filter
1501 1) We want to make sure that the cached filtered revs are invalidated when
1534 1) We want to make sure that the cached filtered revs are invalidated when
1502 bookmarks change
1535 bookmarks change
1503 $ cd ..
1536 $ cd ..
1504 $ cat >$TESTTMP/test_extension.py << EOF
1537 $ cat >$TESTTMP/test_extension.py << EOF
1505 > from __future__ import absolute_import, print_function
1538 > from __future__ import absolute_import, print_function
1506 > import weakref
1539 > import weakref
1507 > from mercurial import (
1540 > from mercurial import (
1508 > bookmarks,
1541 > bookmarks,
1509 > cmdutil,
1542 > cmdutil,
1510 > extensions,
1543 > extensions,
1511 > repoview,
1544 > repoview,
1512 > )
1545 > )
1513 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
1546 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
1514 > reporef = weakref.ref(bkmstoreinst._repo)
1547 > reporef = weakref.ref(bkmstoreinst._repo)
1515 > def trhook(tr):
1548 > def trhook(tr):
1516 > repo = reporef()
1549 > repo = reporef()
1517 > hidden1 = repoview.computehidden(repo)
1550 > hidden1 = repoview.computehidden(repo)
1518 > hidden = repoview.filterrevs(repo, b'visible')
1551 > hidden = repoview.filterrevs(repo, b'visible')
1519 > if sorted(hidden1) != sorted(hidden):
1552 > if sorted(hidden1) != sorted(hidden):
1520 > print("cache inconsistency")
1553 > print("cache inconsistency")
1521 > bkmstoreinst._repo.currenttransaction().addpostclose(b'test_extension', trhook)
1554 > bkmstoreinst._repo.currenttransaction().addpostclose(b'test_extension', trhook)
1522 > orig(bkmstoreinst, *args, **kwargs)
1555 > orig(bkmstoreinst, *args, **kwargs)
1523 > def extsetup(ui):
1556 > def extsetup(ui):
1524 > extensions.wrapfunction(bookmarks.bmstore, '_recordchange',
1557 > extensions.wrapfunction(bookmarks.bmstore, '_recordchange',
1525 > _bookmarkchanged)
1558 > _bookmarkchanged)
1526 > EOF
1559 > EOF
1527
1560
1528 $ hg init repo-cache-inconsistency
1561 $ hg init repo-cache-inconsistency
1529 $ cd repo-issue-nativerevs-pending-changes
1562 $ cd repo-issue-nativerevs-pending-changes
1530 $ mkcommit a
1563 $ mkcommit a
1531 a already tracked!
1564 a already tracked!
1532 $ mkcommit b
1565 $ mkcommit b
1533 $ hg id
1566 $ hg id
1534 13bedc178fce tip
1567 13bedc178fce tip
1535 $ echo "hello" > b
1568 $ echo "hello" > b
1536 $ hg commit --amend -m "message"
1569 $ hg commit --amend -m "message"
1537 $ hg book bookb -r 13bedc178fce --hidden
1570 $ hg book bookb -r 13bedc178fce --hidden
1538 bookmarking hidden changeset 13bedc178fce
1571 bookmarking hidden changeset 13bedc178fce
1539 (hidden revision '13bedc178fce' was rewritten as: a9b1f8652753)
1572 (hidden revision '13bedc178fce' was rewritten as: a9b1f8652753)
1540 $ hg log -r 13bedc178fce
1573 $ hg log -r 13bedc178fce
1541 4:13bedc178fce (draft *obsolete*) [ bookb] add b [rewritten using amend as 5:a9b1f8652753]
1574 4:13bedc178fce (draft *obsolete*) [ bookb] add b [rewritten using amend as 5:a9b1f8652753]
1542 $ hg book -d bookb
1575 $ hg book -d bookb
1543 $ hg log -r 13bedc178fce
1576 $ hg log -r 13bedc178fce
1544 abort: hidden revision '13bedc178fce' was rewritten as: a9b1f8652753!
1577 abort: hidden revision '13bedc178fce' was rewritten as: a9b1f8652753!
1545 (use --hidden to access hidden revisions)
1578 (use --hidden to access hidden revisions)
1546 [255]
1579 [255]
1547
1580
1548 Empty out the test extension, as it isn't compatible with later parts
1581 Empty out the test extension, as it isn't compatible with later parts
1549 of the test.
1582 of the test.
1550 $ echo > $TESTTMP/test_extension.py
1583 $ echo > $TESTTMP/test_extension.py
1551
1584
1552 Test ability to pull changeset with locally applying obsolescence markers
1585 Test ability to pull changeset with locally applying obsolescence markers
1553 (issue4945)
1586 (issue4945)
1554
1587
1555 $ cd ..
1588 $ cd ..
1556 $ hg init issue4845
1589 $ hg init issue4845
1557 $ cd issue4845
1590 $ cd issue4845
1558
1591
1559 $ echo foo > f0
1592 $ echo foo > f0
1560 $ hg add f0
1593 $ hg add f0
1561 $ hg ci -m '0'
1594 $ hg ci -m '0'
1562 $ echo foo > f1
1595 $ echo foo > f1
1563 $ hg add f1
1596 $ hg add f1
1564 $ hg ci -m '1'
1597 $ hg ci -m '1'
1565 $ echo foo > f2
1598 $ echo foo > f2
1566 $ hg add f2
1599 $ hg add f2
1567 $ hg ci -m '2'
1600 $ hg ci -m '2'
1568
1601
1569 $ echo bar > f2
1602 $ echo bar > f2
1570 $ hg commit --amend --config experimental.evolution.createmarkers=True
1603 $ hg commit --amend --config experimental.evolution.createmarkers=True
1571 $ hg log -G
1604 $ hg log -G
1572 @ 3:b0551702f918 (draft) [tip ] 2
1605 @ 3:b0551702f918 (draft) [tip ] 2
1573 |
1606 |
1574 o 1:e016b03fd86f (draft) [ ] 1
1607 o 1:e016b03fd86f (draft) [ ] 1
1575 |
1608 |
1576 o 0:a78f55e5508c (draft) [ ] 0
1609 o 0:a78f55e5508c (draft) [ ] 0
1577
1610
1578 $ hg log -G --hidden
1611 $ hg log -G --hidden
1579 @ 3:b0551702f918 (draft) [tip ] 2
1612 @ 3:b0551702f918 (draft) [tip ] 2
1580 |
1613 |
1581 | x 2:e008cf283490 (draft *obsolete*) [ ] 2 [rewritten using amend as 3:b0551702f918]
1614 | x 2:e008cf283490 (draft *obsolete*) [ ] 2 [rewritten using amend as 3:b0551702f918]
1582 |/
1615 |/
1583 o 1:e016b03fd86f (draft) [ ] 1
1616 o 1:e016b03fd86f (draft) [ ] 1
1584 |
1617 |
1585 o 0:a78f55e5508c (draft) [ ] 0
1618 o 0:a78f55e5508c (draft) [ ] 0
1586
1619
1587
1620
1588 $ hg strip --hidden -r 2 --config extensions.strip= --config devel.strip-obsmarkers=no
1621 $ hg strip --hidden -r 2 --config extensions.strip= --config devel.strip-obsmarkers=no
1589 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-ede36964-backup.hg
1622 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e008cf283490-ede36964-backup.hg
1590 $ hg debugobsolete
1623 $ hg debugobsolete
1591 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1624 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1592 $ hg log -G
1625 $ hg log -G
1593 @ 2:b0551702f918 (draft) [tip ] 2
1626 @ 2:b0551702f918 (draft) [tip ] 2
1594 |
1627 |
1595 o 1:e016b03fd86f (draft) [ ] 1
1628 o 1:e016b03fd86f (draft) [ ] 1
1596 |
1629 |
1597 o 0:a78f55e5508c (draft) [ ] 0
1630 o 0:a78f55e5508c (draft) [ ] 0
1598
1631
1599 $ hg log -G --hidden
1632 $ hg log -G --hidden
1600 @ 2:b0551702f918 (draft) [tip ] 2
1633 @ 2:b0551702f918 (draft) [tip ] 2
1601 |
1634 |
1602 o 1:e016b03fd86f (draft) [ ] 1
1635 o 1:e016b03fd86f (draft) [ ] 1
1603 |
1636 |
1604 o 0:a78f55e5508c (draft) [ ] 0
1637 o 0:a78f55e5508c (draft) [ ] 0
1605
1638
1606 $ hg debugbundle .hg/strip-backup/e008cf283490-*-backup.hg
1639 $ hg debugbundle .hg/strip-backup/e008cf283490-*-backup.hg
1607 Stream params: {Compression: BZ}
1640 Stream params: {Compression: BZ}
1608 changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
1641 changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
1609 e008cf2834908e5d6b0f792a9d4b0e2272260fb8
1642 e008cf2834908e5d6b0f792a9d4b0e2272260fb8
1610 cache:rev-branch-cache -- {} (mandatory: False)
1643 cache:rev-branch-cache -- {} (mandatory: False)
1611 phase-heads -- {} (mandatory: True)
1644 phase-heads -- {} (mandatory: True)
1612 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 draft
1645 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 draft
1613
1646
1614 #if repobundlerepo
1647 #if repobundlerepo
1615 $ hg pull .hg/strip-backup/e008cf283490-*-backup.hg
1648 $ hg pull .hg/strip-backup/e008cf283490-*-backup.hg
1616 pulling from .hg/strip-backup/e008cf283490-ede36964-backup.hg
1649 pulling from .hg/strip-backup/e008cf283490-ede36964-backup.hg
1617 searching for changes
1650 searching for changes
1618 no changes found
1651 no changes found
1619 #endif
1652 #endif
1620 $ hg debugobsolete
1653 $ hg debugobsolete
1621 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1654 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1622 $ hg log -G
1655 $ hg log -G
1623 @ 2:b0551702f918 (draft) [tip ] 2
1656 @ 2:b0551702f918 (draft) [tip ] 2
1624 |
1657 |
1625 o 1:e016b03fd86f (draft) [ ] 1
1658 o 1:e016b03fd86f (draft) [ ] 1
1626 |
1659 |
1627 o 0:a78f55e5508c (draft) [ ] 0
1660 o 0:a78f55e5508c (draft) [ ] 0
1628
1661
1629 $ hg log -G --hidden
1662 $ hg log -G --hidden
1630 @ 2:b0551702f918 (draft) [tip ] 2
1663 @ 2:b0551702f918 (draft) [tip ] 2
1631 |
1664 |
1632 o 1:e016b03fd86f (draft) [ ] 1
1665 o 1:e016b03fd86f (draft) [ ] 1
1633 |
1666 |
1634 o 0:a78f55e5508c (draft) [ ] 0
1667 o 0:a78f55e5508c (draft) [ ] 0
1635
1668
1636
1669
1637 Testing that strip remove markers:
1670 Testing that strip remove markers:
1638
1671
1639 $ hg strip -r 1 --config extensions.strip=
1672 $ hg strip -r 1 --config extensions.strip=
1640 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1673 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1641 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-65ede734-backup.hg
1674 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-65ede734-backup.hg
1642 $ hg debugobsolete
1675 $ hg debugobsolete
1643 $ hg log -G
1676 $ hg log -G
1644 @ 0:a78f55e5508c (draft) [tip ] 0
1677 @ 0:a78f55e5508c (draft) [tip ] 0
1645
1678
1646 $ hg log -G --hidden
1679 $ hg log -G --hidden
1647 @ 0:a78f55e5508c (draft) [tip ] 0
1680 @ 0:a78f55e5508c (draft) [tip ] 0
1648
1681
1649 $ hg debugbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
1682 $ hg debugbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
1650 Stream params: {Compression: BZ}
1683 Stream params: {Compression: BZ}
1651 changegroup -- {nbchanges: 2, version: 02} (mandatory: True)
1684 changegroup -- {nbchanges: 2, version: 02} (mandatory: True)
1652 e016b03fd86fcccc54817d120b90b751aaf367d6
1685 e016b03fd86fcccc54817d120b90b751aaf367d6
1653 b0551702f918510f01ae838ab03a463054c67b46
1686 b0551702f918510f01ae838ab03a463054c67b46
1654 cache:rev-branch-cache -- {} (mandatory: False)
1687 cache:rev-branch-cache -- {} (mandatory: False)
1655 obsmarkers -- {} (mandatory: True)
1688 obsmarkers -- {} (mandatory: True)
1656 version: 1 (92 bytes)
1689 version: 1 (92 bytes)
1657 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1690 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1658 phase-heads -- {} (mandatory: True)
1691 phase-heads -- {} (mandatory: True)
1659 b0551702f918510f01ae838ab03a463054c67b46 draft
1692 b0551702f918510f01ae838ab03a463054c67b46 draft
1660
1693
1661 $ hg unbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
1694 $ hg unbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
1662 adding changesets
1695 adding changesets
1663 adding manifests
1696 adding manifests
1664 adding file changes
1697 adding file changes
1665 added 2 changesets with 2 changes to 2 files
1698 added 2 changesets with 2 changes to 2 files
1666 1 new obsolescence markers
1699 1 new obsolescence markers
1667 new changesets e016b03fd86f:b0551702f918 (2 drafts)
1700 new changesets e016b03fd86f:b0551702f918 (2 drafts)
1668 (run 'hg update' to get a working copy)
1701 (run 'hg update' to get a working copy)
1669 $ hg debugobsolete | sort
1702 $ hg debugobsolete | sort
1670 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1703 e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
1671 $ hg log -G
1704 $ hg log -G
1672 o 2:b0551702f918 (draft) [tip ] 2
1705 o 2:b0551702f918 (draft) [tip ] 2
1673 |
1706 |
1674 o 1:e016b03fd86f (draft) [ ] 1
1707 o 1:e016b03fd86f (draft) [ ] 1
1675 |
1708 |
1676 @ 0:a78f55e5508c (draft) [ ] 0
1709 @ 0:a78f55e5508c (draft) [ ] 0
1677
1710
1678 $ hg log -G --hidden
1711 $ hg log -G --hidden
1679 o 2:b0551702f918 (draft) [tip ] 2
1712 o 2:b0551702f918 (draft) [tip ] 2
1680 |
1713 |
1681 o 1:e016b03fd86f (draft) [ ] 1
1714 o 1:e016b03fd86f (draft) [ ] 1
1682 |
1715 |
1683 @ 0:a78f55e5508c (draft) [ ] 0
1716 @ 0:a78f55e5508c (draft) [ ] 0
1684
1717
1685 Test that 'hg debugobsolete --index --rev' can show indices of obsmarkers when
1718 Test that 'hg debugobsolete --index --rev' can show indices of obsmarkers when
1686 only a subset of those are displayed (because of --rev option)
1719 only a subset of those are displayed (because of --rev option)
1687 $ hg init doindexrev
1720 $ hg init doindexrev
1688 $ cd doindexrev
1721 $ cd doindexrev
1689 $ echo a > a
1722 $ echo a > a
1690 $ hg ci -Am a
1723 $ hg ci -Am a
1691 adding a
1724 adding a
1692 $ hg ci --amend -m aa
1725 $ hg ci --amend -m aa
1693 $ echo b > b
1726 $ echo b > b
1694 $ hg ci -Am b
1727 $ hg ci -Am b
1695 adding b
1728 adding b
1696 $ hg ci --amend -m bb
1729 $ hg ci --amend -m bb
1697 $ echo c > c
1730 $ echo c > c
1698 $ hg ci -Am c
1731 $ hg ci -Am c
1699 adding c
1732 adding c
1700 $ hg ci --amend -m cc
1733 $ hg ci --amend -m cc
1701 $ echo d > d
1734 $ echo d > d
1702 $ hg ci -Am d
1735 $ hg ci -Am d
1703 adding d
1736 adding d
1704 $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
1737 $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
1705 $ hg debugobsolete --index --rev "3+7"
1738 $ hg debugobsolete --index --rev "3+7"
1706 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1739 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1707 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1740 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1708 $ hg debugobsolete --index --rev "3+7" -Tjson
1741 $ hg debugobsolete --index --rev "3+7" -Tjson
1709 [
1742 [
1710 {
1743 {
1711 "date": [0, 0],
1744 "date": [0, 0],
1712 "flag": 0,
1745 "flag": 0,
1713 "index": 1,
1746 "index": 1,
1714 "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
1747 "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
1715 "prednode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
1748 "prednode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
1716 "succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
1749 "succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
1717 },
1750 },
1718 {
1751 {
1719 "date": [0, 0],
1752 "date": [0, 0],
1720 "flag": 0,
1753 "flag": 0,
1721 "index": 3,
1754 "index": 3,
1722 "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
1755 "metadata": {"ef1": "1", "operation": "amend", "user": "test"},
1723 "prednode": "4715cf767440ed891755448016c2b8cf70760c30",
1756 "prednode": "4715cf767440ed891755448016c2b8cf70760c30",
1724 "succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
1757 "succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
1725 }
1758 }
1726 ]
1759 ]
1727
1760
1728 Test the --delete option of debugobsolete command
1761 Test the --delete option of debugobsolete command
1729 $ hg debugobsolete --index
1762 $ hg debugobsolete --index
1730 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1763 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1731 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1764 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1732 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1765 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1733 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1766 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1734 $ hg debugobsolete --delete 1 --delete 3
1767 $ hg debugobsolete --delete 1 --delete 3
1735 deleted 2 obsolescence markers
1768 deleted 2 obsolescence markers
1736 $ hg debugobsolete
1769 $ hg debugobsolete
1737 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1770 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1738 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1771 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '1', 'operation': 'amend', 'user': 'test'}
1739
1772
1740 Test adding changeset after obsmarkers affecting it
1773 Test adding changeset after obsmarkers affecting it
1741 (eg: during pull, or unbundle)
1774 (eg: during pull, or unbundle)
1742
1775
1743 $ mkcommit e
1776 $ mkcommit e
1744 $ hg bundle -r . --base .~1 ../bundle-2.hg
1777 $ hg bundle -r . --base .~1 ../bundle-2.hg
1745 1 changesets found
1778 1 changesets found
1746 $ getid .
1779 $ getid .
1747 $ hg --config extensions.strip= strip -r .
1780 $ hg --config extensions.strip= strip -r .
1748 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1781 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1749 saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg
1782 saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg
1750 $ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
1783 $ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
1751 1 new obsolescence markers
1784 1 new obsolescence markers
1752 $ hg unbundle ../bundle-2.hg
1785 $ hg unbundle ../bundle-2.hg
1753 adding changesets
1786 adding changesets
1754 adding manifests
1787 adding manifests
1755 adding file changes
1788 adding file changes
1756 added 1 changesets with 1 changes to 1 files
1789 added 1 changesets with 1 changes to 1 files
1757 (1 other changesets obsolete on arrival)
1790 (1 other changesets obsolete on arrival)
1758 (run 'hg update' to get a working copy)
1791 (run 'hg update' to get a working copy)
1759 $ hg log -G
1792 $ hg log -G
1760 @ 7:7ae79c5d60f0 (draft) [tip ] dd
1793 @ 7:7ae79c5d60f0 (draft) [tip ] dd
1761 |
1794 |
1762 | o 6:4715cf767440 (draft) [ ] d
1795 | o 6:4715cf767440 (draft) [ ] d
1763 |/
1796 |/
1764 o 5:29346082e4a9 (draft) [ ] cc
1797 o 5:29346082e4a9 (draft) [ ] cc
1765 |
1798 |
1766 o 3:d27fb9b06607 (draft) [ ] bb
1799 o 3:d27fb9b06607 (draft) [ ] bb
1767 |
1800 |
1768 | o 2:6fdef60fcbab (draft) [ ] b
1801 | o 2:6fdef60fcbab (draft) [ ] b
1769 |/
1802 |/
1770 o 1:f9bd49731b0b (draft) [ ] aa
1803 o 1:f9bd49731b0b (draft) [ ] aa
1771
1804
1772
1805
1773 $ cd ..
1806 $ cd ..
1774
1807
1775 Test issue 5783
1808 Test issue 5783
1776
1809
1777 $ hg init issue-5783 --config format.obsstore-version=0
1810 $ hg init issue-5783 --config format.obsstore-version=0
1778 $ cd issue-5783
1811 $ cd issue-5783
1779 $ touch a.cpp
1812 $ touch a.cpp
1780 $ hg add a.cpp
1813 $ hg add a.cpp
1781 $ hg commit -m 'Add a.cpp'
1814 $ hg commit -m 'Add a.cpp'
1782 $ echo 'Hello' > a.cpp
1815 $ echo 'Hello' > a.cpp
1783 $ hg amend -n 'Testing::Obsstore' --config format.obsstore-version=0 --config extensions.amend=
1816 $ hg amend -n 'Testing::Obsstore' --config format.obsstore-version=0 --config extensions.amend=
1784 $ touch b.cpp
1817 $ touch b.cpp
1785 $ hg add b.cpp
1818 $ hg add b.cpp
1786 $ hg commit -m 'Add b.cpp'
1819 $ hg commit -m 'Add b.cpp'
1787 $ echo 'Hello' > b.cpp
1820 $ echo 'Hello' > b.cpp
1788 $ hg amend -n 'Testing::Obsstore2' --config extensions.amend=
1821 $ hg amend -n 'Testing::Obsstore2' --config extensions.amend=
1789 $ hg debugobsolete
1822 $ hg debugobsolete
1790 d1b09fe3ad2b2a03e23a72f0c582e29a49570145 1a1a11184d2588af24e767e5335d5d9d07e8c550 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'note': 'Testing::Obsstore', 'operation': 'amend', 'user': 'test'}
1823 d1b09fe3ad2b2a03e23a72f0c582e29a49570145 1a1a11184d2588af24e767e5335d5d9d07e8c550 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'note': 'Testing::Obsstore', 'operation': 'amend', 'user': 'test'}
1791 1bfd8e3868f641e048b6667cd672c68932f26d00 79959ca316d5b27ac6be1dd0cfd0843a5b5412eb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'note': 'Testing::Obsstore2', 'operation': 'amend', 'user': 'test'}
1824 1bfd8e3868f641e048b6667cd672c68932f26d00 79959ca316d5b27ac6be1dd0cfd0843a5b5412eb 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'note': 'Testing::Obsstore2', 'operation': 'amend', 'user': 'test'}
1792 $ cd ..
1825 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now