##// END OF EJS Templates
devel: use default-date config field when creating obsmarkers...
Boris Feld -
r32411:08d02c1d default
parent child Browse files
Show More
@@ -1,2161 +1,2161 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 context,
35 context,
36 dagparser,
36 dagparser,
37 dagutil,
37 dagutil,
38 encoding,
38 encoding,
39 error,
39 error,
40 exchange,
40 exchange,
41 extensions,
41 extensions,
42 filemerge,
42 filemerge,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 policy,
50 policy,
51 pvec,
51 pvec,
52 pycompat,
52 pycompat,
53 registrar,
53 registrar,
54 repair,
54 repair,
55 revlog,
55 revlog,
56 revset,
56 revset,
57 revsetlang,
57 revsetlang,
58 scmutil,
58 scmutil,
59 setdiscovery,
59 setdiscovery,
60 simplemerge,
60 simplemerge,
61 smartset,
61 smartset,
62 sslutil,
62 sslutil,
63 streamclone,
63 streamclone,
64 templater,
64 templater,
65 treediscovery,
65 treediscovery,
66 upgrade,
66 upgrade,
67 util,
67 util,
68 vfs as vfsmod,
68 vfs as vfsmod,
69 )
69 )
70
70
71 release = lockmod.release
71 release = lockmod.release
72
72
73 command = registrar.command()
73 command = registrar.command()
74
74
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Three arguments: an explicit revlog index file plus the two revisions.
    # No repository is needed in this mode; the revlog is opened directly
    # from the current working directory.
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    # Two arguments: the two revisions, resolved against the local
    # repository's changelog.
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    # print the ancestor as "<revnum>:<full hex node>"
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))
93
93
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle (local path or URL) and replay it onto the repo
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
100
100
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # only an empty repository may be (re)built from a DAG description
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # first pass over the DAG: count node events so the progress output
    # below can show a meaningful total
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        # at: rev number of the last node created (-1 before the first);
        # nodeids[i] is the node committed for rev i, used to resolve backrefs
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        # second pass: actually commit one changeset per 'n' event
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the "mf" contents of
                        # both parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's pair of lines so every rev changes "mf"
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    # "of" is rewritten wholesale by every revision
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    # a fresh "nf<id>" file per revision; merges also carry
                    # over the second parent's nf* files
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # local tag event: remember it; written to .hg/localtags at
                # the end rather than committed
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)
252
252
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """print the contents of a changegroup (helper for debugbundle)

    With all=True, every delta chunk of the changelog, manifest and each
    filelog is listed; otherwise only the changelog node ids are printed.
    indent shifts every output line right (used for bundle2 parts).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # one section per revlog; gen.deltachunk(chain) yields {} as the
            # end-of-stream sentinel, hence iter(callable, {})
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                # each chunk's delta is computed against the previous node
                chain = node

        # the stream order is fixed: changelog, manifest, then filelogs
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # terse mode: just the changelog node ids
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node
290
290
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        # only changegroup parts get their payload expanded
        if part.type != 'changegroup':
            continue
        cgversion = part.params.get('version', '01')
        unbundler = changegroup.getunbundler(cgversion, part, 'UN')
        _debugchangegroup(ui, unbundler, all=all, indent=4, **opts)
302
302
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        # --spec short-circuits: print the bundlespec and stop
        if spec:
            ui.write('%s\n' % exchange.getbundlespec(ui, fh))
            return

        bundle = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(bundle, bundle2.unbundle20):
            return _debugbundle2(ui, bundle, all=all, **opts)
        _debugchangegroup(ui, bundle, all=all, **opts)
320
320
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # cross-check every dirstate entry against the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # and conversely: every file in the first parent's manifest must be
    # tracked by the dirstate
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # NOTE: the previous code bound this message to a local named
        # 'error', shadowing the imported 'error' module and making the
        # error.Abort lookup below fail with an AttributeError
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
348
348
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    # always report the active color mode first, then dispatch on --style
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
359
359
def _debugdisplaycolor(ui):
    """print every available color/effect name, each rendered in itself"""
    # work on a copy so the temporary style table built below does not
    # leak into the caller's ui
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    # in terminfo mode, custom colors/effects from the [color] config
    # section are also available
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        # print each name styled with its own effect
        ui.write(('%s\n') % colorname, label=label)
377
377
def _debugdisplaystyle(ui):
    """print each configured style with its effects, column-aligned"""
    ui.write(_('available style:\n'))
    longest = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            # pad so the effect lists line up in one column
            ui.write(' ' * (max(0, longest - len(label))))
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(', '.join(rendered))
        ui.write('\n')
389
389
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
401
401
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    # Either branch below defines an events() generator yielding
    # ('n', (rev, parents)), ('l', (rev, label)) and ('a', branch) events
    # in the shape dagparser.dagtextlines() consumes.
    if file_:
        # standalone revlog index: emit its DAG, labeling only the
        # revisions explicitly listed on the command line
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names, for 'l' label events below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' annotation whenever the branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
464
464
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # with -c/-m/--dir the single positional argument is the revision,
    # not a file name
    usesinternallog = (opts.get('changelog') or opts.get('manifest')
                       or opts.get('dir'))
    if usesinternallog:
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
479
479
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended also tries the extra, more permissive date formats
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
495
495
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        # Gather per-revision statistics from the raw index entry.  As used
        # below: e[1] is the compressed length, e[2] the uncompressed length,
        # e[3] the delta base revision, e[5]/e[6] the parent revisions.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the base may be any revision; classify it by
            # comparing against the parents, the previous rev, and rev itself.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta a revision is either a full snapshot
            # (base == rev) or a delta against the previous revision.
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        # Total compressed size of every revision in this delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio\n')

    # Number chains by their base revision, in order of first appearance.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        # On-disk distance from the chain base to the end of this revision.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: this revision is its own base.
            prevrev = -1

        # NOTE(review): these divide by uncomp/chainsize without a zero
        # guard — presumably zero-length revisions cannot occur here, but a
        # ZeroDivisionError is possible if they can; confirm.
        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
596
596
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    # Dirstate entries are (state, mode, size, mtime) tuples keyed by path.
    if datesort:
        # Order by saved mtime first, breaking ties by filename.
        sortkey = lambda item: (item[1][3], item[0])
    else:
        sortkey = None  # plain filename order
    for path, entry in sorted(repo.dirstate._map.iteritems(), key=sortkey):
        mtime = entry[3]
        if mtime == -1:
            when = 'unset               '
        elif nodates:
            when = 'set                 '
        else:
            when = time.strftime("%Y-%m-%d %H:%M:%S ",
                                 time.localtime(mtime))
        if entry[1] & 0o20000:
            # symlink bit recorded in the mode
            mode = 'lnk'
        else:
            mode = '%3o' % (entry[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (entry[0], mode, entry[2], when, path))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
627
627
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # Run one discovery session and report the computed common heads.
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                # Prune the common set down to the heads of its ancestor set.
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    # NOTE(review): 'serverlog' is not among the options declared above;
    # presumably it can only be supplied by another caller — confirm.
    serverlogs = opts.get('serverlog')
    if serverlogs:
        # Replay discovery sessions recorded in server log files; each line
        # is expected to be ';'-separated with the operation name in field 1.
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
693
693
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    loaded = sorted(extensions.extensions(ui), key=operator.itemgetter(0))
    for extname, extmod in loaded:
        internal = extensions.ismoduleinternal(extmod)
        source = pycompat.fsencode(extmod.__file__)
        if internal:
            # never expose magic string to users
            testedwith = []
        else:
            testedwith = getattr(extmod, 'testedwith', '').split()
        buglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if internal or hgver in testedwith:
                fm.plain('\n')
            elif not testedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                fm.plain(' (%s!)\n' % testedwith[-1])

        fm.condwrite(ui.verbose and source, 'source',
                 _('  location: %s\n'), source or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][internal])
        fm.data(bundled=internal)

        fm.condwrite(ui.verbose and testedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(testedwith, name='ver'))

        fm.condwrite(ui.verbose and buglink, 'buglink',
                     _('  bug reporting: %s\n'), buglink or "")

    fm.end()
738
738
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        # show the parsed tree before evaluating the expression
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)
751
751
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(value):
        # same truthiness mapping as the classic `x and 'yes' or 'no'` idiom
        return value and 'yes' or 'no'

    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    casesensitive = '(unknown)'
    try:
        # probe with a scratch file in the target directory
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
766
766
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle('debug', **args)

    # map user-facing compression names to on-disk bundle type identifiers
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
800
800
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
        return
    for f in files:
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # the file itself is not matched; check parent directories
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % f)
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % f)
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (f, ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
841
841
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # With generaldelta the base column holds the delta parent; otherwise it
    # holds the chain base, so label the column accordingly.
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    # Full hex node ids in debug mode, abbreviated ones otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
897
897
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in r:
        p1, p2 = r.parents(r.node(rev))
        # one edge per real parent, pointing parent -> child
        ui.write("\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write("}\n")
911
911
912 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
912 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
913 def debuginstall(ui, **opts):
913 def debuginstall(ui, **opts):
914 '''test Mercurial installation
914 '''test Mercurial installation
915
915
916 Returns 0 on success.
916 Returns 0 on success.
917 '''
917 '''
918
918
919 def writetemp(contents):
919 def writetemp(contents):
920 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
920 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
921 f = os.fdopen(fd, pycompat.sysstr("wb"))
921 f = os.fdopen(fd, pycompat.sysstr("wb"))
922 f.write(contents)
922 f.write(contents)
923 f.close()
923 f.close()
924 return name
924 return name
925
925
926 problems = 0
926 problems = 0
927
927
928 fm = ui.formatter('debuginstall', opts)
928 fm = ui.formatter('debuginstall', opts)
929 fm.startitem()
929 fm.startitem()
930
930
931 # encoding
931 # encoding
932 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
932 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
933 err = None
933 err = None
934 try:
934 try:
935 encoding.fromlocal("test")
935 encoding.fromlocal("test")
936 except error.Abort as inst:
936 except error.Abort as inst:
937 err = inst
937 err = inst
938 problems += 1
938 problems += 1
939 fm.condwrite(err, 'encodingerror', _(" %s\n"
939 fm.condwrite(err, 'encodingerror', _(" %s\n"
940 " (check that your locale is properly set)\n"), err)
940 " (check that your locale is properly set)\n"), err)
941
941
942 # Python
942 # Python
943 fm.write('pythonexe', _("checking Python executable (%s)\n"),
943 fm.write('pythonexe', _("checking Python executable (%s)\n"),
944 pycompat.sysexecutable)
944 pycompat.sysexecutable)
945 fm.write('pythonver', _("checking Python version (%s)\n"),
945 fm.write('pythonver', _("checking Python version (%s)\n"),
946 ("%d.%d.%d" % sys.version_info[:3]))
946 ("%d.%d.%d" % sys.version_info[:3]))
947 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
947 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
948 os.path.dirname(pycompat.fsencode(os.__file__)))
948 os.path.dirname(pycompat.fsencode(os.__file__)))
949
949
950 security = set(sslutil.supportedprotocols)
950 security = set(sslutil.supportedprotocols)
951 if sslutil.hassni:
951 if sslutil.hassni:
952 security.add('sni')
952 security.add('sni')
953
953
954 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
954 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
955 fm.formatlist(sorted(security), name='protocol',
955 fm.formatlist(sorted(security), name='protocol',
956 fmt='%s', sep=','))
956 fmt='%s', sep=','))
957
957
958 # These are warnings, not errors. So don't increment problem count. This
958 # These are warnings, not errors. So don't increment problem count. This
959 # may change in the future.
959 # may change in the future.
960 if 'tls1.2' not in security:
960 if 'tls1.2' not in security:
961 fm.plain(_(' TLS 1.2 not supported by Python install; '
961 fm.plain(_(' TLS 1.2 not supported by Python install; '
962 'network connections lack modern security\n'))
962 'network connections lack modern security\n'))
963 if 'sni' not in security:
963 if 'sni' not in security:
964 fm.plain(_(' SNI not supported by Python install; may have '
964 fm.plain(_(' SNI not supported by Python install; may have '
965 'connectivity issues with some servers\n'))
965 'connectivity issues with some servers\n'))
966
966
967 # TODO print CA cert info
967 # TODO print CA cert info
968
968
969 # hg version
969 # hg version
970 hgver = util.version()
970 hgver = util.version()
971 fm.write('hgver', _("checking Mercurial version (%s)\n"),
971 fm.write('hgver', _("checking Mercurial version (%s)\n"),
972 hgver.split('+')[0])
972 hgver.split('+')[0])
973 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
973 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
974 '+'.join(hgver.split('+')[1:]))
974 '+'.join(hgver.split('+')[1:]))
975
975
976 # compiled modules
976 # compiled modules
977 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
977 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
978 policy.policy)
978 policy.policy)
979 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
979 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
980 os.path.dirname(pycompat.fsencode(__file__)))
980 os.path.dirname(pycompat.fsencode(__file__)))
981
981
982 if policy.policy in ('c', 'allow'):
982 if policy.policy in ('c', 'allow'):
983 err = None
983 err = None
984 try:
984 try:
985 from .cext import (
985 from .cext import (
986 base85,
986 base85,
987 bdiff,
987 bdiff,
988 mpatch,
988 mpatch,
989 osutil,
989 osutil,
990 )
990 )
991 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
991 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
992 except Exception as inst:
992 except Exception as inst:
993 err = inst
993 err = inst
994 problems += 1
994 problems += 1
995 fm.condwrite(err, 'extensionserror', " %s\n", err)
995 fm.condwrite(err, 'extensionserror', " %s\n", err)
996
996
997 compengines = util.compengines._engines.values()
997 compengines = util.compengines._engines.values()
998 fm.write('compengines', _('checking registered compression engines (%s)\n'),
998 fm.write('compengines', _('checking registered compression engines (%s)\n'),
999 fm.formatlist(sorted(e.name() for e in compengines),
999 fm.formatlist(sorted(e.name() for e in compengines),
1000 name='compengine', fmt='%s', sep=', '))
1000 name='compengine', fmt='%s', sep=', '))
1001 fm.write('compenginesavail', _('checking available compression engines '
1001 fm.write('compenginesavail', _('checking available compression engines '
1002 '(%s)\n'),
1002 '(%s)\n'),
1003 fm.formatlist(sorted(e.name() for e in compengines
1003 fm.formatlist(sorted(e.name() for e in compengines
1004 if e.available()),
1004 if e.available()),
1005 name='compengine', fmt='%s', sep=', '))
1005 name='compengine', fmt='%s', sep=', '))
1006 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1006 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1007 fm.write('compenginesserver', _('checking available compression engines '
1007 fm.write('compenginesserver', _('checking available compression engines '
1008 'for wire protocol (%s)\n'),
1008 'for wire protocol (%s)\n'),
1009 fm.formatlist([e.name() for e in wirecompengines
1009 fm.formatlist([e.name() for e in wirecompengines
1010 if e.wireprotosupport()],
1010 if e.wireprotosupport()],
1011 name='compengine', fmt='%s', sep=', '))
1011 name='compengine', fmt='%s', sep=', '))
1012
1012
1013 # templates
1013 # templates
1014 p = templater.templatepaths()
1014 p = templater.templatepaths()
1015 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1015 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1016 fm.condwrite(not p, '', _(" no template directories found\n"))
1016 fm.condwrite(not p, '', _(" no template directories found\n"))
1017 if p:
1017 if p:
1018 m = templater.templatepath("map-cmdline.default")
1018 m = templater.templatepath("map-cmdline.default")
1019 if m:
1019 if m:
1020 # template found, check if it is working
1020 # template found, check if it is working
1021 err = None
1021 err = None
1022 try:
1022 try:
1023 templater.templater.frommapfile(m)
1023 templater.templater.frommapfile(m)
1024 except Exception as inst:
1024 except Exception as inst:
1025 err = inst
1025 err = inst
1026 p = None
1026 p = None
1027 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1027 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1028 else:
1028 else:
1029 p = None
1029 p = None
1030 fm.condwrite(p, 'defaulttemplate',
1030 fm.condwrite(p, 'defaulttemplate',
1031 _("checking default template (%s)\n"), m)
1031 _("checking default template (%s)\n"), m)
1032 fm.condwrite(not m, 'defaulttemplatenotfound',
1032 fm.condwrite(not m, 'defaulttemplatenotfound',
1033 _(" template '%s' not found\n"), "default")
1033 _(" template '%s' not found\n"), "default")
1034 if not p:
1034 if not p:
1035 problems += 1
1035 problems += 1
1036 fm.condwrite(not p, '',
1036 fm.condwrite(not p, '',
1037 _(" (templates seem to have been installed incorrectly)\n"))
1037 _(" (templates seem to have been installed incorrectly)\n"))
1038
1038
1039 # editor
1039 # editor
1040 editor = ui.geteditor()
1040 editor = ui.geteditor()
1041 editor = util.expandpath(editor)
1041 editor = util.expandpath(editor)
1042 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1042 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1043 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1043 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1044 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1044 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1045 _(" No commit editor set and can't find %s in PATH\n"
1045 _(" No commit editor set and can't find %s in PATH\n"
1046 " (specify a commit editor in your configuration"
1046 " (specify a commit editor in your configuration"
1047 " file)\n"), not cmdpath and editor == 'vi' and editor)
1047 " file)\n"), not cmdpath and editor == 'vi' and editor)
1048 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1048 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1049 _(" Can't find editor '%s' in PATH\n"
1049 _(" Can't find editor '%s' in PATH\n"
1050 " (specify a commit editor in your configuration"
1050 " (specify a commit editor in your configuration"
1051 " file)\n"), not cmdpath and editor)
1051 " file)\n"), not cmdpath and editor)
1052 if not cmdpath and editor != 'vi':
1052 if not cmdpath and editor != 'vi':
1053 problems += 1
1053 problems += 1
1054
1054
1055 # check username
1055 # check username
1056 username = None
1056 username = None
1057 err = None
1057 err = None
1058 try:
1058 try:
1059 username = ui.username()
1059 username = ui.username()
1060 except error.Abort as e:
1060 except error.Abort as e:
1061 err = e
1061 err = e
1062 problems += 1
1062 problems += 1
1063
1063
1064 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1064 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1065 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1065 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1066 " (specify a username in your configuration file)\n"), err)
1066 " (specify a username in your configuration file)\n"), err)
1067
1067
1068 fm.condwrite(not problems, '',
1068 fm.condwrite(not problems, '',
1069 _("no problems detected\n"))
1069 _("no problems detected\n"))
1070 if not problems:
1070 if not problems:
1071 fm.data(problems=problems)
1071 fm.data(problems=problems)
1072 fm.condwrite(problems, 'problems',
1072 fm.condwrite(problems, 'problems',
1073 _("%d problems detected,"
1073 _("%d problems detected,"
1074 " please check your install!\n"), problems)
1074 " please check your install!\n"), problems)
1075 fm.end()
1075 fm.end()
1076
1076
1077 return problems
1077 return problems
1078
1078
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # Ask the peer about all ids in one round-trip, then render the
    # answers as a string of '1' (known) / '0' (unknown) characters.
    flags = peer.known([bin(s) for s in ids])
    ui.write("%s\n" % "".join('1' if f else '0' for f in flags))
1091
1091
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so that ancient shell-completion scripts keep working;
    # forward everything to the modern implementation.
    debugnamecomplete(ui, repo, *args)
1096
1096
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    # BUG FIX: the second operand used to re-test 'force_lock', so invoking
    # this command with only --force-wlock fell through to the lock report
    # below instead of returning right after freeing the lock.
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Probe the lock without blocking. This also causes stale locks to
        # get reaped, making the report below more accurate.
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired the lock, so it was free - release it again
            l.release()
        else:
            # somebody else holds it (or the probe raised LockHeld):
            # describe the holder as best we can from the lock file
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                # lock file vanished between probe and lstat: treat as free

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1168
1168
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the all-zero hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Dump the raw merge-state records for the requested format version.
        # v1records/v2records are closed over from the enclosing function.
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            # Record type legend (single-character tags):
            #   L local node, O other node, m merge driver state,
            #   F/D/C per-file merge entries, f per-file extras, l labels.
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # record is 'driver\0mdstate'
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file record: NUL-separated fields; v1 lacks the
                # "other node" field, so it only has flags at index 7
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: 'filename\0key1\0val1\0key2\0val2...'
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # merge labels: local, other and (optionally) base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                # unknown record type: show it raw with NULs made visible
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types sort first, in 'order'; everything else after,
        # ordered by record payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        # mismatching v1/v2 state: fall back to v1, and with --verbose also
        # show the v2 records so the discrepancy can be inspected
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1267
1267
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # 'branches' is handled separately below so that, as before, only
    # open branches are offered as completions.
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    candidates.update(tag for (tag, heads, tip, closed)
                      in repo.branchmap().iterbranches() if not closed)
    # No arguments means "complete the empty prefix", i.e. list everything.
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1287
1287
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # Parse 's' as a full binary node id, aborting on anything shorter
        # or malformed.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by index and return early
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        # deleting rewrites the obsstore file, which cannot be done safely
        # while a transaction is open
        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    # no explicit --date: leave it to obsstore.create
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                # 'ui' is passed so that obsstore.create can consult ui
                # configuration (per this changeset, the default-date
                # config field) when no explicit date was given -- see the
                # matching obsolete.py change; TODO confirm against it
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally limited to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # --index must be computed against the full marker list, even
            # though only the --rev-relevant subset gets displayed
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1398
1398
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs): completions for 'path' drawn from dirstate
        # entries whose state character appears in 'acceptable'
        # (n=normal, m=merged, a=added, r=removed).
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # path lies outside the repository: nothing to complete
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # make 'spec' repo-relative with '/' separators, since that is how
        # the dirstate stores paths
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # bind the bound methods once; this loop runs over the whole dirstate
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    # without --full, only complete up to the next separator
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the filter options;
    # no filter option at all means "accept everything" ('nmar' below)
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1463
1463
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # Map --tool onto ui.forcemerge for the duration of the command, so
    # filemerge._picktool sees it exactly as a real merge would.
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # With -v, surface the inputs that can decide the tool before any
        # pattern matching happens (steps 1, 2 and 4 of the docstring).
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Unless --debug is given, swallow the chatter _picktool
                # emits while probing; only the final "FILE = TOOL" line
                # is printed below.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1541
1541
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: no KEY/OLD/NEW given, so dump every key/value
        # pair in the namespace, one tab-separated pair per line.
        for key, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(key),
                                   util.escapestr(value)))
        return
    # Update mode: attempt the conditional set and report its outcome;
    # exit status is 0 on success, 1 on failure.
    key, old, new = keyinfo
    succeeded = peer.pushkey(namespace, key, old, new)
    ui.status(str(succeeded) + '\n')
    return not succeeded
1562
1562
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs (parent vectors) of two revisions and print each
    # vector, their depths, and distance/relation metrics.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # Bug fix: previously `rel` was left unbound when none of the
        # relations above held, making the ui.write below raise
        # NameError. Report an unknown relation instead.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1583
1583
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None tells dirstate.rebuild to reset everything.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but missing from the dirstate...
            manifestonly = manifestfiles - dirstatefiles
            # ...plus files only the dirstate knows about, excluding ones
            # explicitly marked as added ('a'), which must be preserved.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1621
1621
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all of the work happens in repair.rebuildfncache.
    repair.rebuildfncache(ui, repo)
1626
1626
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    # Resolve the context once, then walk every file matching the
    # given patterns and report its rename/copy source, if any.
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        relpath = matcher.rel(path)
        fctx = ctx[path]
        copysource = fctx.filelog().renamed(fctx.filenode())
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
1643
1643
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: one raw index row per revision, then stop.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0  # running total of raw (uncompressed) sizes
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent"; treat the rev as its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the set of current head revisions incrementally:
            # a rev displaces its parents as heads.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Ratio of raw size so far to on-disk size so far.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Statistics mode: decode the revlog version/flags first.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters for revision/delta classification.
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # Each size accumulator is [min, max, total]; min starts as None so
    # the first sample always wins.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into a [min, max, total] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot revision: chain length restarts at 0.
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            # Delta revision: extend the delta parent's chain, and
            # classify the delta base (previous rev / p1 / p2 / other).
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog. The first byte of
        # the stored segment identifies the compression engine.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = segment[0]
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Slot [2] is reused in place: totals become averages from here on.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Templates for width-aligned numeric output; the %%%dd placeholder
    # is filled with the digit count of the largest value to print.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Plain decimal format sized to `max`.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Decimal-plus-percentage format sized to `max`.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percent-of-total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render the chunk-type label; printable engine ids get both hex
        # and ASCII forms, anything else hex only.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in string.ascii_letters:
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
        ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                           numdeltas))
1863
1863
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
    ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    # The revset pipeline, in order: each stage takes the tree produced
    # by the previous one.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        # Drop the final (optimize) stage entirely.
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # Which stage trees get printed: `showalways` unconditionally,
    # `showchanged` only when the tree differs from the last one printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, keeping every intermediate tree for possible
    # --verify-optimized comparison below.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the
        # resulting revision sequences; exit 1 if they disagree.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if ui.verbose:
            ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-diff-style report of the mismatch.
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # Normal mode: evaluate the final tree and print each revision.
    func = revset.makematcher(tree)
    revs = func(repo)
    if ui.verbose:
        ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)
1956
1956
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions to binary node ids before taking the lock;
    # a missing second revision defaults to the null revision.
    parent1 = scmutil.revsingle(repo, rev1).node()
    parent2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(parent1, parent2)
1974
1974
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, revision) recorded in
    # the given revision, one entry per subrepo, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path in sorted(ctx.substate):
        source, revision = ctx.substate[path][:2]
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % source)
        ui.write((' revision %s\n') % revision)
1985
1985
@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    if ui.debug():
        # in debug mode, display full hashes instead of short/str forms
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    else:
        ctx2str = str
        node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            # one indented line per successors set; an empty set (pruned)
            # still produces a blank line
            if succsset:
                ui.write('    ')
                ui.write(' '.join(node2str(n) for n in succsset))
            ui.write('\n')
2039
2039
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # log-template mode needs an actual repository
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts['rev'])

    # collect -D KEY=VALUE definitions into template properties;
    # 'ui' is reserved because it is injected below
    props = {}
    for definition in opts['define']:
        try:
            key, value = (part.strip() for part in definition.split('=', 1))
            if not key or key == 'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s')
                              % definition)

    if ui.verbose:
        # show the raw parse tree, and the alias-expanded tree when it
        # actually differs
        aliases = ui.configitems('templatealias')
        parsed = templater.parse(tmpl)
        ui.note(templater.prettyformat(parsed), '\n')
        expanded = templater.expandaliases(parsed, aliases)
        if expanded != parsed:
            ui.note(("* expanded:\n"), templater.prettyformat(expanded), '\n')

    mapfile = None
    if revs is None:
        # generic template: render once, with only the -D properties
        name = 'debugtemplate'
        t = formatter.maketemplater(ui, name, tmpl)
        ui.write(templater.stringify(t(name, ui=ui, **props)))
    else:
        # log template: render once per requested changeset
        displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
                                                mapfile, buffered=False)
        for r in revs:
            displayer.show(repo[r], **props)
        displayer.close()
2090
2090
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # take both the working-copy and store locks so any cache may be
    # safely rewritten
    with repo.wlock(), repo.lock():
        repo.updatecaches()
2097
2097
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # Thin wrapper: all analysis, locking, and rewriting is delegated to
    # the upgrade module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2122
2122
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo[None].walk(m))
    if not items:
        return
    # Only rewrite separators when the user asked for forward slashes and
    # the platform separator differs.
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        normalize = util.normpath
    else:
        normalize = lambda fn: fn
    # Compute each relative name once: it is needed both for column sizing
    # and for the per-file output line.  (The original recomputed m.rel()
    # per file and shadowed the builtin 'abs' as a loop variable.)
    rels = [m.rel(path) for path in items]
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(rel) for rel in rels))
    for path, rel in zip(items, rels):
        line = fmt % (path, normalize(rel), 'exact' if m.exact(path) else '')
        ui.write("%s\n" % line.rstrip())
2140
2140
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # connect to the peer the way a client would, honoring remote options,
    # then strip those options so only the test arguments remain
    peer = hg.peer(ui, opts, repopath)
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # forward only the options that were actually set
    args = {}
    for key, value in opts.iteritems():
        if value:
            args[key] = value
    # run twice to check that we don't mess up the stream for the next command
    res1 = peer.debugwireargs(*vals, **args)
    res2 = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
@@ -1,1293 +1,1298 b''
1 # obsolete.py - obsolete markers handling
1 # obsolete.py - obsolete markers handling
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """Obsolete marker handling
9 """Obsolete marker handling
10
10
11 An obsolete marker maps an old changeset to a list of new
11 An obsolete marker maps an old changeset to a list of new
12 changesets. If the list of new changesets is empty, the old changeset
12 changesets. If the list of new changesets is empty, the old changeset
13 is said to be "killed". Otherwise, the old changeset is being
13 is said to be "killed". Otherwise, the old changeset is being
14 "replaced" by the new changesets.
14 "replaced" by the new changesets.
15
15
16 Obsolete markers can be used to record and distribute changeset graph
16 Obsolete markers can be used to record and distribute changeset graph
17 transformations performed by history rewrite operations, and help
17 transformations performed by history rewrite operations, and help
18 building new tools to reconcile conflicting rewrite actions. To
18 building new tools to reconcile conflicting rewrite actions. To
19 facilitate conflict resolution, markers include various annotations
19 facilitate conflict resolution, markers include various annotations
20 besides old and new changeset identifiers, such as creation date or
20 besides old and new changeset identifiers, such as creation date or
21 author name.
21 author name.
22
22
23 The old obsoleted changeset is called a "precursor" and possible
23 The old obsoleted changeset is called a "precursor" and possible
24 replacements are called "successors". Markers that used changeset X as
24 replacements are called "successors". Markers that used changeset X as
25 a precursor are called "successor markers of X" because they hold
25 a precursor are called "successor markers of X" because they hold
26 information about the successors of X. Markers that use changeset Y as
26 information about the successors of X. Markers that use changeset Y as
27 a successor are called "precursor markers of Y" because they hold
27 a successor are called "precursor markers of Y" because they hold
28 information about the precursors of Y.
28 information about the precursors of Y.
29
29
30 Examples:
30 Examples:
31
31
32 - When changeset A is replaced by changeset A', one marker is stored:
32 - When changeset A is replaced by changeset A', one marker is stored:
33
33
34 (A, (A',))
34 (A, (A',))
35
35
36 - When changesets A and B are folded into a new changeset C, two markers are
36 - When changesets A and B are folded into a new changeset C, two markers are
37 stored:
37 stored:
38
38
39 (A, (C,)) and (B, (C,))
39 (A, (C,)) and (B, (C,))
40
40
41 - When changeset A is simply "pruned" from the graph, a marker is created:
41 - When changeset A is simply "pruned" from the graph, a marker is created:
42
42
43 (A, ())
43 (A, ())
44
44
45 - When changeset A is split into B and C, a single marker is used:
45 - When changeset A is split into B and C, a single marker is used:
46
46
47 (A, (B, C))
47 (A, (B, C))
48
48
49 We use a single marker to distinguish the "split" case from the "divergence"
49 We use a single marker to distinguish the "split" case from the "divergence"
50 case. If two independent operations rewrite the same changeset A in to A' and
50 case. If two independent operations rewrite the same changeset A in to A' and
51 A'', we have an error case: divergent rewriting. We can detect it because
51 A'', we have an error case: divergent rewriting. We can detect it because
52 two markers will be created independently:
52 two markers will be created independently:
53
53
54 (A, (B,)) and (A, (C,))
54 (A, (B,)) and (A, (C,))
55
55
56 Format
56 Format
57 ------
57 ------
58
58
59 Markers are stored in an append-only file stored in
59 Markers are stored in an append-only file stored in
60 '.hg/store/obsstore'.
60 '.hg/store/obsstore'.
61
61
62 The file starts with a version header:
62 The file starts with a version header:
63
63
64 - 1 unsigned byte: version number, starting at zero.
64 - 1 unsigned byte: version number, starting at zero.
65
65
66 The header is followed by the markers. Marker format depend of the version. See
66 The header is followed by the markers. Marker format depend of the version. See
67 comment associated with each format for details.
67 comment associated with each format for details.
68
68
69 """
69 """
70 from __future__ import absolute_import
70 from __future__ import absolute_import
71
71
72 import errno
72 import errno
73 import struct
73 import struct
74
74
75 from .i18n import _
75 from .i18n import _
76 from . import (
76 from . import (
77 error,
77 error,
78 node,
78 node,
79 phases,
79 phases,
80 policy,
80 policy,
81 util,
81 util,
82 )
82 )
83
83
84 parsers = policy.importmod(r'parsers')
84 parsers = policy.importmod(r'parsers')
85
85
86 _pack = struct.pack
86 _pack = struct.pack
87 _unpack = struct.unpack
87 _unpack = struct.unpack
88 _calcsize = struct.calcsize
88 _calcsize = struct.calcsize
89 propertycache = util.propertycache
89 propertycache = util.propertycache
90
90
91 # the obsolete feature is not mature enough to be enabled by default.
91 # the obsolete feature is not mature enough to be enabled by default.
92 # you have to rely on third party extension extension to enable this.
92 # you have to rely on third party extension extension to enable this.
93 _enabled = False
93 _enabled = False
94
94
95 # Options for obsolescence
95 # Options for obsolescence
96 createmarkersopt = 'createmarkers'
96 createmarkersopt = 'createmarkers'
97 allowunstableopt = 'allowunstable'
97 allowunstableopt = 'allowunstable'
98 exchangeopt = 'exchange'
98 exchangeopt = 'exchange'
99
99
def isenabled(repo, option):
    """Returns True if the given repository has the given obsolete option
    enabled.

    ``option`` is one of the module-level option names (createmarkersopt,
    allowunstableopt, exchangeopt).  Raises Abort when a dependent option
    is enabled without 'createmarkers'.
    """
    result = set(repo.ui.configlist('experimental', 'evolution'))
    if 'all' in result:
        return True

    # For migration purposes, temporarily return true if the config hasn't been
    # set but _enabled is true.
    if not result and _enabled:
        return True

    # createmarkers must be enabled if other options are enabled
    if ((allowunstableopt in result or exchangeopt in result) and
        createmarkersopt not in result):
        raise error.Abort(_("'createmarkers' obsolete option must be enabled "
                            "if other obsolete options are enabled"))

    return option in result
120
120
121 ### obsolescence marker flag
121 ### obsolescence marker flag
122
122
123 ## bumpedfix flag
123 ## bumpedfix flag
124 #
124 #
125 # When a changeset A' succeeds a changeset A which became public, we call A'
125 # When a changeset A' succeeds a changeset A which became public, we call A'
126 # "bumped" because it's a successor of a public changeset
126 # "bumped" because it's a successor of a public changeset
127 #
127 #
128 # o A' (bumped)
128 # o A' (bumped)
129 # |`:
129 # |`:
130 # | o A
130 # | o A
131 # |/
131 # |/
132 # o Z
132 # o Z
133 #
133 #
134 # The way to solve this situation is to create a new changeset Ad as children
134 # The way to solve this situation is to create a new changeset Ad as children
135 # of A. This changeset have the same content than A'. So the diff from A to A'
135 # of A. This changeset have the same content than A'. So the diff from A to A'
136 # is the same than the diff from A to Ad. Ad is marked as a successors of A'
136 # is the same than the diff from A to Ad. Ad is marked as a successors of A'
137 #
137 #
138 # o Ad
138 # o Ad
139 # |`:
139 # |`:
140 # | x A'
140 # | x A'
141 # |'|
141 # |'|
142 # o | A
142 # o | A
143 # |/
143 # |/
144 # o Z
144 # o Z
145 #
145 #
146 # But by transitivity Ad is also a successors of A. To avoid having Ad marked
146 # But by transitivity Ad is also a successors of A. To avoid having Ad marked
147 # as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
147 # as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
148 # This flag mean that the successors express the changes between the public and
148 # This flag mean that the successors express the changes between the public and
149 # bumped version and fix the situation, breaking the transitivity of
149 # bumped version and fix the situation, breaking the transitivity of
150 # "bumped" here.
150 # "bumped" here.
# marker flag: the successors fix a "bumped" changeset (see diagram above);
# they express the diff between the public and the bumped version, breaking
# the transitivity of "bumped"
bumpedfix = 1
# marker flag: node ids in this marker are 32-byte sha256, not 20-byte sha1
usingsha256 = 2

## Parsing and writing of version "0"
#
# The header is followed by the markers. Each marker is made of:
#
# - 1 uint8 : number of new changesets "N", can be zero.
#
# - 1 uint32: metadata size "M" in bytes.
#
# - 1 byte: a bit field. It is reserved for flags used in common
#   obsolete marker operations, to avoid repeated decoding of metadata
#   entries.
#
# - 20 bytes: obsoleted changeset identifier.
#
# - N*20 bytes: new changesets identifiers.
#
# - M bytes: metadata as a sequence of nul-terminated strings. Each
#   string contains a key and a value, separated by a colon ':', without
#   additional encoding. Keys cannot contain '\0' or ':' and values
#   cannot contain '\0'.
_fm0version = 0
# fixed-size record: numsuc (uint8), metadata size (uint32), flags (uint8),
# precursor node (20 bytes), big-endian
_fm0fixed = '>BIB20s'
# one successor node id (20 bytes)
_fm0node = '20s'
_fm0fsize = _calcsize(_fm0fixed)
_fm0fnodesize = _calcsize(_fm0node)
179
179
def _fm0readmarkers(data, off):
    """Yield obsolescence markers decoded from version-0 binary ``data``.

    ``off`` is the byte offset of the first marker inside ``data``.  Each
    yielded marker is a ``(precursor, successors, flags, metadata, date,
    parents)`` tuple; ``metadata`` is a sorted tuple of (key, value) pairs.
    A trailing fragment shorter than one fixed-size record is ignored.
    """
    # Loop on markers
    l = len(data)
    while off + _fm0fsize <= l:
        # read fixed part
        cur = data[off:off + _fm0fsize]
        off += _fm0fsize
        numsuc, mdsize, flags, pre = _unpack(_fm0fixed, cur)
        # read replacement: numsuc successor node ids, 20 bytes each
        sucs = ()
        if numsuc:
            s = (_fm0fnodesize * numsuc)
            cur = data[off:off + s]
            sucs = _unpack(_fm0node * numsuc, cur)
            off += s
        # read metadata (mdsize bytes of encoded key/value pairs)
        metadata = data[off:off + mdsize]
        if len(metadata) != mdsize:
            raise error.Abort(_('parsing obsolete marker: metadata is too '
                                'short, %d bytes expected, got %d')
                              % (mdsize, len(metadata)))
        off += mdsize
        metadata = _fm0decodemeta(metadata)
        # v0 stores the date inside the metadata mapping as "when offset";
        # fall back to the epoch when missing or malformed
        try:
            when, offset = metadata.pop('date', '0 0').split(' ')
            date = float(when), int(offset)
        except ValueError:
            date = (0., 0)
        # parents are optional metadata entries: p1/p2 when parents were
        # recorded, p0 as an explicit "no parents" record; no entry at all
        # means the parents are unknown (None)
        parents = None
        if 'p2' in metadata:
            parents = (metadata.pop('p1', None), metadata.pop('p2', None))
        elif 'p1' in metadata:
            parents = (metadata.pop('p1', None),)
        elif 'p0' in metadata:
            parents = ()
        if parents is not None:
            try:
                parents = tuple(node.bin(p) for p in parents)
                # if parent content is not a nodeid, drop the data
                for p in parents:
                    if len(p) != 20:
                        parents = None
                        break
            except TypeError:
                # if content cannot be translated to nodeid drop the data.
                parents = None

        metadata = tuple(sorted(metadata.iteritems()))

        yield (pre, sucs, flags, metadata, date, parents)
231
231
def _fm0encodeonemarker(marker):
    """Serialize one marker tuple into the version-0 binary encoding."""
    pre, sucs, flags, metadata, date, parents = marker
    # sha256 node ids do not fit the fixed 20-byte fields of format 0
    if flags & usingsha256:
        raise error.Abort(_('cannot handle sha256 with old obsstore format'))
    meta = dict(metadata)
    when, tz = date
    meta['date'] = '%r %i' % (when, tz)
    if parents is not None:
        if not parents:
            # mark that we explicitly recorded no parents
            meta['p0'] = ''
        for idx, p in enumerate(parents, 1):
            meta['p%i' % idx] = node.hex(p)
    encodedmeta = _fm0encodemeta(meta)
    numsuc = len(sucs)
    fmt = _fm0fixed + _fm0node * numsuc
    fields = [numsuc, len(encodedmeta), flags, pre]
    fields.extend(sucs)
    return _pack(fmt, *fields) + encodedmeta
251
251
def _fm0encodemeta(meta):
    """Return encoded metadata string to string mapping.

    Assume no ':' in key and no '\0' in both key and value.
    Entries are sorted by key and joined with '\0' separators.
    """
    for key, value in meta.iteritems():
        if ':' in key or '\0' in key:
            raise ValueError("':' and '\0' are forbidden in metadata key'")
        if '\0' in value:
            # BUG FIX: the message previously claimed ':' was forbidden in
            # values, but the check (and the format) only forbids '\0'.
            raise ValueError("'\\0' is forbidden in metadata value'")
    return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
263 def _fm0decodemeta(data):
263 def _fm0decodemeta(data):
264 """Return string to string dictionary from encoded version."""
264 """Return string to string dictionary from encoded version."""
265 d = {}
265 d = {}
266 for l in data.split('\0'):
266 for l in data.split('\0'):
267 if l:
267 if l:
268 key, value = l.split(':')
268 key, value = l.split(':')
269 d[key] = value
269 d[key] = value
270 return d
270 return d
271
271
## Parsing and writing of version "1"
#
# The header is followed by the markers. Each marker is made of:
#
# - uint32: total size of the marker (including this field)
#
# - float64: date in seconds since epoch
#
# - int16: timezone offset in minutes
#
# - uint16: a bit field. It is reserved for flags used in common
#   obsolete marker operations, to avoid repeated decoding of metadata
#   entries.
#
# - uint8: number of successors "N", can be zero.
#
# - uint8: number of parents "P", can be zero.
#
#     0: parents data stored but no parent,
#     1: one parent stored,
#     2: two parents stored,
#     3: no parent data stored
#
# - uint8: number of metadata entries M
#
# - 20 or 32 bytes: precursor changeset identifier.
#
# - N*(20 or 32) bytes: successors changesets identifiers.
#
# - P*(20 or 32) bytes: parents of the precursors changesets.
#
# - M*(uint8, uint8): size of all metadata entries (key and value)
#
# - remaining bytes: the metadata, each (key, value) pair after the other.
_fm1version = 1
_fm1fixed = '>IdhHBBB20s'
_fm1nodesha1 = '20s'
_fm1nodesha256 = '32s'
_fm1nodesha1size = _calcsize(_fm1nodesha1)
_fm1nodesha256size = _calcsize(_fm1nodesha256)
_fm1fsize = _calcsize(_fm1fixed)
_fm1parentnone = 3
_fm1parentshift = 14
_fm1parentmask = (_fm1parentnone << _fm1parentshift)
_fm1metapair = 'BB'
_fm1metapairsize = _calcsize(_fm1metapair)
def _fm1purereadmarkers(data, off):
    """Pure-python generator decoding version "1" markers from ``data``.

    Starts at byte offset ``off`` and stops when fewer than a full fixed
    header remains. Used as fallback when the C parser is unavailable.
    """
    # make some global constants local for performance
    noneflag = _fm1parentnone
    sha2flag = usingsha256
    sha1size = _fm1nodesha1size
    sha2size = _fm1nodesha256size
    sha1fmt = _fm1nodesha1
    sha2fmt = _fm1nodesha256
    metasize = _fm1metapairsize
    metafmt = _fm1metapair
    fsize = _fm1fsize
    unpack = _unpack

    # Loop on markers
    stop = len(data) - _fm1fsize
    ufixed = struct.Struct(_fm1fixed).unpack

    while off <= stop:
        # read fixed part
        o1 = off + fsize
        t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1])

        if flags & sha2flag:
            # FIXME: prec was read as a SHA1, needs to be amended

            # read 0 or more successors
            if numsuc == 1:
                o2 = o1 + sha2size
                sucs = (data[o1:o2],)
            else:
                o2 = o1 + sha2size * numsuc
                sucs = unpack(sha2fmt * numsuc, data[o1:o2])

            # read parents
            if numpar == noneflag:
                o3 = o2
                parents = None
            elif numpar == 1:
                o3 = o2 + sha2size
                parents = (data[o2:o3],)
            else:
                o3 = o2 + sha2size * numpar
                parents = unpack(sha2fmt * numpar, data[o2:o3])
        else:
            # read 0 or more successors
            if numsuc == 1:
                o2 = o1 + sha1size
                sucs = (data[o1:o2],)
            else:
                o2 = o1 + sha1size * numsuc
                sucs = unpack(sha1fmt * numsuc, data[o1:o2])

            # read parents
            if numpar == noneflag:
                o3 = o2
                parents = None
            elif numpar == 1:
                o3 = o2 + sha1size
                parents = (data[o2:o3],)
            else:
                o3 = o2 + sha1size * numpar
                parents = unpack(sha1fmt * numpar, data[o2:o3])

        # read metadata
        off = o3 + metasize * nummeta
        metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off])
        metadata = []
        for idx in xrange(0, len(metapairsize), 2):
            o1 = off + metapairsize[idx]
            o2 = o1 + metapairsize[idx + 1]
            metadata.append((data[off:o1], data[o1:o2]))
            off = o2

        # on disk tz is in minutes; markers carry it in seconds
        yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents)
def _fm1encodeonemarker(marker):
    """Serialize a single marker in the version "1" binary format."""
    pre, sucs, flags, metadata, date, parents = marker
    # node width depends on the hash flavor used by this marker
    _fm1node = _fm1nodesha1
    if flags & usingsha256:
        _fm1node = _fm1nodesha256
    numsuc = len(sucs)
    numextranodes = numsuc
    if parents is None:
        numpar = _fm1parentnone
    else:
        numpar = len(parents)
        numextranodes += numpar
    formatnodes = _fm1node * numextranodes
    formatmeta = _fm1metapair * len(metadata)
    fmt = _fm1fixed + formatnodes + formatmeta
    # tz is stored in minutes so we divide by 60
    tz = date[1] // 60
    # fields[0] is the total size, patched in once known
    fields = [None, date[0], tz, flags, numsuc, numpar, len(metadata), pre]
    fields.extend(sucs)
    if parents is not None:
        fields.extend(parents)
    totalsize = _calcsize(fmt)
    for key, value in metadata:
        lk = len(key)
        lv = len(value)
        fields.append(lk)
        fields.append(lv)
        totalsize += lk + lv
    fields[0] = totalsize
    chunks = [_pack(fmt, *fields)]
    for key, value in metadata:
        chunks.append(key)
        chunks.append(value)
    return ''.join(chunks)
def _fm1readmarkers(data, off):
    """Decode version "1" markers, preferring the C parser when present."""
    native = getattr(parsers, 'fm1readmarkers', None)
    if native is None:
        return _fm1purereadmarkers(data, off)
    stop = len(data) - _fm1fsize
    return native(data, off, stop)
# mapping to read/write various marker formats
# <version> -> (decoder, encoder)
formats = {
    _fm0version: (_fm0readmarkers, _fm0encodeonemarker),
    _fm1version: (_fm1readmarkers, _fm1encodeonemarker),
}
@util.nogc
def _readmarkers(data):
    """Return (version, iterator of markers) parsed from raw data.

    The first byte of ``data`` is the on-disk format version; the rest is
    handed to the matching decoder from ``formats``.
    """
    diskversion = _unpack('>B', data[0:1])[0]
    if diskversion not in formats:
        raise error.Abort(_('parsing obsolete marker: unknown version %r')
                          % diskversion)
    return diskversion, formats[diskversion][0](data, 1)
def encodemarkers(markers, addheader=False, version=_fm0version):
    """Generate the binary encoding for an iterable of markers.

    Kept separate from flushmarkers(); it will be reused for markers
    exchange. When ``addheader`` is true, the one-byte version header is
    yielded first.
    """
    encodeone = formats[version][1]
    if addheader:
        yield _pack('>B', version)
    for marker in markers:
        yield encodeone(marker)
class marker(object):
    """Wrap obsolete marker raw data"""

    def __init__(self, repo, data):
        # the repo argument will be used to create changectx in later version
        self._repo = repo
        self._data = data
        self._decodedmeta = None

    def __hash__(self):
        return hash(self._data)

    def __eq__(self, other):
        return type(self) == type(other) and self._data == other._data

    def precnode(self):
        """Precursor changeset node identifier"""
        return self._data[0]

    def succnodes(self):
        """List of successor changesets node identifiers"""
        return self._data[1]

    def parentnodes(self):
        """Parents of the precursors (None if not recorded)"""
        return self._data[5]

    def metadata(self):
        """Decoded metadata dictionary"""
        return dict(self._data[3])

    def date(self):
        """Creation date as (unixtime, offset)"""
        return self._data[4]

    def flags(self):
        """The flags field of the marker"""
        return self._data[2]
@util.nogc
def _addsuccessors(successors, markers):
    # index each marker under its precursor node (mark[0])
    for mark in markers:
        successors.setdefault(mark[0], set()).add(mark)
@util.nogc
def _addprecursors(precursors, markers):
    # index each marker under every one of its successor nodes (mark[1])
    for mark in markers:
        for suc in mark[1]:
            precursors.setdefault(suc, set()).add(mark)
@util.nogc
def _addchildren(children, markers):
    # index each marker under its recorded parent nodes (mark[5]);
    # None means "no parent data recorded" and is skipped
    for mark in markers:
        parents = mark[5]
        if parents is not None:
            for p in parents:
                children.setdefault(p, set()).add(mark)
def _checkinvalidmarkers(markers):
    """search for marker with invalid data and raise error if needed

    Exist as a separated function to allow the evolve extension for a more
    subtle handling.
    """
    # a nullid successor is never legitimate; prune markers use an empty
    # successor tuple instead
    for mark in markers:
        if node.nullid in mark[1]:
            raise error.Abort(_('bad obsolescence marker detected: '
                                'invalid successors nullid'))
class obsstore(object):
    """Store obsolete markers

    Markers can be accessed with two mappings:
    - precursors[x] -> set(markers on precursors edges of x)
    - successors[x] -> set(markers on successors edges of x)
    - children[x] -> set(markers on precursors edges of children(x)
    """

    fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
    # prec: nodeid, precursor changesets
    # succs: tuple of nodeid, successor changesets (0-N length)
    # flag: integer, flag field carrying modifier for the markers (see doc)
    # meta: binary blob, encoded metadata dictionary
    # date: (float, int) tuple, date of marker creation
    # parents: (tuple of nodeid) or None, parents of precursors
    #          None is used when no data has been recorded

    def __init__(self, svfs, defaultformat=_fm1version, readonly=False):
        # caches for various obsolescence related cache
        self.caches = {}
        self.svfs = svfs
        self._version = defaultformat
        self._readonly = readonly

    def __iter__(self):
        return iter(self._all)

    def __len__(self):
        return len(self._all)

    def __nonzero__(self):
        # cheap emptiness test: a non-empty obsstore file is more than the
        # single version-header byte
        if not self._cached('_all'):
            try:
                return self.svfs.stat('obsstore').st_size > 1
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise
                # just build an empty _all list if no obsstore exists, which
                # avoids further stat() syscalls
                pass
        return bool(self._all)

    __bool__ = __nonzero__

    @property
    def readonly(self):
        """True if marker creation is disabled

        Remove me in the future when obsolete marker is always on."""
        return self._readonly

    def create(self, transaction, prec, succs=(), flag=0, parents=None,
               date=None, metadata=None, ui=None):
        """obsolete: add a new obsolete marker

        * ensuring it is hashable
        * check mandatory metadata
        * encode metadata

        If you are a human writing code creating marker you want to use the
        `createmarkers` function in this module instead.

        return True if a new marker have been added, False if the markers
        already existed (no op).
        """
        if metadata is None:
            metadata = {}
        if date is None:
            if 'date' in metadata:
                # as a courtesy for out-of-tree extensions
                date = util.parsedate(metadata.pop('date'))
            elif ui is not None:
                # let tests pin marker dates via the devel.default-date config
                date = ui.configdate('devel', 'default-date')
                if date is None:
                    date = util.makedate()
            else:
                date = util.makedate()
        if len(prec) != 20:
            raise ValueError(prec)
        for succ in succs:
            if len(succ) != 20:
                raise ValueError(succ)
        if prec in succs:
            raise ValueError(_('in-marker cycle with %s') % node.hex(prec))

        metadata = tuple(sorted(metadata.iteritems()))

        marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
        return bool(self.add(transaction, [marker]))

    def add(self, transaction, markers):
        """Add new markers to the store

        Take care of filtering duplicate.
        Return the number of new marker."""
        if self._readonly:
            raise error.Abort(_('creating obsolete markers is not enabled on '
                                'this repo'))
        known = set(self._all)
        new = []
        for m in markers:
            if m not in known:
                known.add(m)
                new.append(m)
        if new:
            f = self.svfs('obsstore', 'ab')
            try:
                offset = f.tell()
                transaction.add('obsstore', offset)
                # offset == 0: new file - add the version header
                for bytes in encodemarkers(new, offset == 0, self._version):
                    f.write(bytes)
            finally:
                # XXX: f.close() == filecache invalidation == obsstore rebuilt.
                # call 'filecacheentry.refresh()' here
                f.close()
            self._addmarkers(new)
            # new marker *may* have changed several set. invalidate the cache.
            self.caches.clear()
        # records the number of new markers for the transaction hooks
        previous = int(transaction.hookargs.get('new_obsmarkers', '0'))
        transaction.hookargs['new_obsmarkers'] = str(previous + len(new))
        return len(new)

    def mergemarkers(self, transaction, data):
        """merge a binary stream of markers inside the obsstore

        Returns the number of new markers added."""
        version, markers = _readmarkers(data)
        return self.add(transaction, markers)

    @propertycache
    def _all(self):
        # lazily load, decode and sanity-check every marker on disk
        data = self.svfs.tryread('obsstore')
        if not data:
            return []
        self._version, markers = _readmarkers(data)
        markers = list(markers)
        _checkinvalidmarkers(markers)
        return markers

    @propertycache
    def successors(self):
        successors = {}
        _addsuccessors(successors, self._all)
        return successors

    @propertycache
    def precursors(self):
        precursors = {}
        _addprecursors(precursors, self._all)
        return precursors

    @propertycache
    def children(self):
        children = {}
        _addchildren(children, self._all)
        return children

    def _cached(self, attr):
        # True when the named propertycache has already been computed
        return attr in self.__dict__

    def _addmarkers(self, markers):
        markers = list(markers)  # to allow repeated iteration
        self._all.extend(markers)
        if self._cached('successors'):
            _addsuccessors(self.successors, markers)
        if self._cached('precursors'):
            _addprecursors(self.precursors, markers)
        if self._cached('children'):
            _addchildren(self.children, markers)
        _checkinvalidmarkers(markers)

    def relevantmarkers(self, nodes):
        """return a set of all obsolescence markers relevant to a set of nodes.

        "relevant" to a set of nodes mean:

        - marker that use this changeset as successor
        - prune marker of direct children on this changeset
        - recursive application of the two rules on precursors of these markers

        It is a set so you cannot rely on order."""

        pendingnodes = set(nodes)
        seenmarkers = set()
        seennodes = set(pendingnodes)
        precursorsmarkers = self.precursors
        children = self.children
        while pendingnodes:
            direct = set()
            for current in pendingnodes:
                direct.update(precursorsmarkers.get(current, ()))
                # prune markers have an empty successor tuple (m[1])
                pruned = [m for m in children.get(current, ()) if not m[1]]
                direct.update(pruned)
            direct -= seenmarkers
            pendingnodes = set([m[0] for m in direct])
            seenmarkers |= direct
            pendingnodes -= seennodes
            seennodes |= pendingnodes
        return seenmarkers
def commonversion(versions):
    """Return the newest version listed in both versions and our local formats.

    ``versions`` is sorted in place, descending (callers may rely on this
    side effect). Returns None if no common version exists.
    """
    versions.sort(reverse=True)
    # pick the highest version known on both sides
    return next((v for v in versions if v in formats), None)
745 # arbitrary picked to fit into 8K limit from HTTP server
749 # arbitrary picked to fit into 8K limit from HTTP server
746 # you have to take in account:
750 # you have to take in account:
747 # - the version header
751 # - the version header
748 # - the base85 encoding
752 # - the base85 encoding
749 _maxpayload = 5300
753 _maxpayload = 5300
750
754
def _pushkeyescape(markers):
    """encode markers into a dict suitable for pushkey exchange

    - binary data is base85 encoded
    - split in chunks smaller than 5300 bytes"""
    keys = {}
    chunks = []
    chunklen = _maxpayload * 2  # ensure we create a new part
    for marker in markers:
        encoded = _fm0encodeonemarker(marker)
        if (len(encoded) + chunklen > _maxpayload):
            chunk = []
            chunklen = 0
            chunks.append(chunk)
        chunk.append(encoded)
        chunklen += len(encoded)
    for idx, chunk in enumerate(reversed(chunks)):
        blob = ''.join([_pack('>B', _fm0version)] + chunk)
        keys['dump%i' % idx] = util.b85encode(blob)
    return keys
def listmarkers(repo):
    """List markers over pushkey"""
    store = repo.obsstore
    if store:
        return _pushkeyescape(sorted(store))
    return {}
def pushmarker(repo, key, old, new):
    """Push markers over pushkey

    Returns 1 on success, 0 when the key is unknown or an old value was
    unexpectedly supplied (pushkey convention).
    """
    if not key.startswith('dump'):
        repo.ui.warn(_('unknown key: %r') % key)
        return 0
    if old:
        repo.ui.warn(_('unexpected old value for %r') % key)
        return 0
    data = util.b85decode(new)
    lock = repo.lock()
    try:
        tr = repo.transaction('pushkey: obsolete markers')
        try:
            repo.obsstore.mergemarkers(tr, data)
            repo.invalidatevolatilesets()
            tr.close()
            return 1
        finally:
            tr.release()
    finally:
        lock.release()
def getmarkers(repo, nodes=None):
    """Iterate over the obsolescence markers known in a repository.

    If <nodes> is specified, only markers "relevant" to those nodes are
    returned."""
    store = repo.obsstore
    rawmarkers = store if nodes is None else store.relevantmarkers(nodes)
    for data in rawmarkers:
        yield marker(repo, data)
812
816
def relevantmarkers(repo, node):
    """Iterate over all obsolescence markers relevant to some revision."""
    for data in repo.obsstore.relevantmarkers(node):
        yield marker(repo, data)
817
821
818
822
def precursormarkers(ctx):
    """Iterate over the markers using this changeset as a successor."""
    store = ctx.repo().obsstore
    for data in store.precursors.get(ctx.node(), ()):
        yield marker(ctx.repo(), data)
823
827
def successormarkers(ctx):
    """Iterate over the markers making this changeset obsolete."""
    store = ctx.repo().obsstore
    for data in store.successors.get(ctx.node(), ()):
        yield marker(ctx.repo(), data)
828
832
def allsuccessors(obsstore, nodes, ignoreflags=0):
    """Yield the node of every successor of <nodes>.

    Some successors may be unknown locally.

    This is a linear yield unsuited to detecting split changesets. It
    includes the initial nodes too."""
    seen = set(nodes)
    pending = set(seen)
    while pending:
        node = pending.pop()
        yield node
        for mark in obsstore.successors.get(node, ()):
            # skip any marker carrying one of the ignored flags
            if mark[2] & ignoreflags:
                continue
            # mark[1] holds the marker's successors
            unseen = [s for s in mark[1] if s not in seen]
            seen.update(unseen)
            pending.update(unseen)
849
853
def allprecursors(obsstore, nodes, ignoreflags=0):
    """Yield the node of every precursor of <nodes>.

    Some precursors may be unknown locally.

    This is a linear yield unsuited to detecting folded changesets. It
    includes the initial nodes too."""
    seen = set(nodes)
    pending = set(seen)
    while pending:
        node = pending.pop()
        yield node
        for mark in obsstore.precursors.get(node, ()):
            if mark[2] & ignoreflags:
                # marker carries an ignored flag, do not follow it
                continue
            prec = mark[0]  # the marker's precursor node
            if prec not in seen:
                seen.add(prec)
                pending.add(prec)
871
875
def foreground(repo, nodes):
    """return all nodes in the "foreground" of other node

    The foreground of a revision is anything reachable using parent -> children
    or precursor -> successor relation. It is very similar to "descendant" but
    augmented with obsolescence information.

    Beware that possible obsolescence cycle may result if complex situation.
    """
    repo = repo.unfiltered()
    # start from the plain descendants of the requested nodes
    foreground = set(repo.set('%ln::', nodes))
    if repo.obsstore:
        # We only need this complicated logic if there is obsolescence
        # XXX will probably deserve an optimised revset.
        nm = repo.changelog.nodemap
        plen = -1
        # Fixpoint loop: alternately extend the set through obsolescence
        # markers (successors) and through the changelog (descendants) until
        # no new node appears.
        while len(foreground) != plen:
            plen = len(foreground)
            succs = set(c.node() for c in foreground)
            mutable = [c.node() for c in foreground if c.mutable()]
            succs.update(allsuccessors(repo.obsstore, mutable))
            # successors may be unknown locally; keep only nodes we have
            known = (n for n in succs if n in nm)
            foreground = set(repo.set('%ln::', known))
    return set(c.node() for c in foreground)
897
901
898
902
def successorssets(repo, initialnode, cache=None):
    """Return set of all latest successors of initial nodes

    The successors set of a changeset A are the group of revisions that succeed
    A. It succeeds A as a consistent whole, each revision being only a partial
    replacement. The successors set contains non-obsolete changesets only.

    This function returns the full list of successor sets which is why it
    returns a list of tuples and not just a single tuple. Each tuple is a valid
    successors set. Note that (A,) may be a valid successors set for changeset A
    (see below).

    In most cases, a changeset A will have a single element (e.g. the changeset
    A is replaced by A') in its successors set. Though, it is also common for a
    changeset A to have no elements in its successor set (e.g. the changeset
    has been pruned). Therefore, the returned list of successors sets will be
    [(A',)] or [], respectively.

    When a changeset A is split into A' and B', however, it will result in a
    successors set containing more than a single element, i.e. [(A',B')].
    Divergent changesets will result in multiple successors sets, i.e. [(A',),
    (A'')].

    If a changeset A is not obsolete, then it will conceptually have no
    successors set. To distinguish this from a pruned changeset, the successor
    set will contain itself only, i.e. [(A,)].

    Finally, successors unknown locally are considered to be pruned (obsoleted
    without any successors).

    The optional `cache` parameter is a dictionary that may contain precomputed
    successors sets. It is meant to reuse the computation of a previous call to
    `successorssets` when multiple calls are made at the same time. The cache
    dictionary is updated in place. The caller is responsible for its life
    span. Code that makes multiple calls to `successorssets` *must* use this
    cache mechanism or suffer terrible performance.
    """

    succmarkers = repo.obsstore.successors

    # Stack of nodes we search successors sets for
    toproceed = [initialnode]
    # set version of above list for fast loop detection
    # element added to "toproceed" must be added here
    stackedset = set(toproceed)
    if cache is None:
        cache = {}

    # This while loop is the flattened version of a recursive search for
    # successors sets
    #
    # def successorssets(x):
    #     successors = directsuccessors(x)
    #     ss = [[]]
    #     for succ in directsuccessors(x):
    #         # product as in itertools cartesian product
    #         ss = product(ss, successorssets(succ))
    #     return ss
    #
    # But we can not use plain recursive calls here:
    # - that would blow the python call stack
    # - obsolescence markers may have cycles, we need to handle them.
    #
    # The `toproceed` list acts as our call stack. Every node whose
    # successors set we search for is stacked there.
    #
    # The `stackedset` is the set version of this stack used to check if a
    # node is already stacked. This check is used to detect cycles and prevent
    # infinite loops.
    #
    # successors sets of all nodes are stored in the `cache` dictionary.
    #
    # After this while loop ends we use the cache to return the successors sets
    # for the node requested by the caller.
    while toproceed:
        # Every iteration tries to compute the successors sets of the topmost
        # node of the stack: CURRENT.
        #
        # There are four possible outcomes:
        #
        # 1) We already know the successors sets of CURRENT:
        #    -> mission accomplished, pop it from the stack.
        # 2) Node is not obsolete:
        #    -> the node is its own successors sets. Add it to the cache.
        # 3) We do not know successors set of direct successors of CURRENT:
        #    -> We add those successors to the stack.
        # 4) We know successors sets of all direct successors of CURRENT:
        #    -> We can compute CURRENT successors set and add it to the
        #       cache.
        #
        current = toproceed[-1]
        if current in cache:
            # case (1): We already know the successors sets
            stackedset.remove(toproceed.pop())
        elif current not in succmarkers:
            # case (2): The node is not obsolete.
            if current in repo:
                # We have a valid last successors.
                cache[current] = [(current,)]
            else:
                # Final obsolete version is unknown locally.
                # Do not count that as a valid successors
                cache[current] = []
        else:
            # cases (3) and (4)
            #
            # We proceed in two phases. Phase 1 aims to distinguish case (3)
            # from case (4):
            #
            #     For each direct successors of CURRENT, we check whether its
            #     successors sets are known. If they are not, we stack the
            #     unknown node and proceed to the next iteration of the while
            #     loop. (case 3)
            #
            #     During this step, we may detect obsolescence cycles: a node
            #     with unknown successors sets but already in the call stack.
            #     In such a situation, we arbitrarily set the successors sets
            #     of the node to nothing (node pruned) to break the cycle.
            #
            #     If no break was encountered we proceed to phase 2.
            #
            # Phase 2 computes successors sets of CURRENT (case 4); see details
            # in phase 2 itself.
            #
            # Note the two levels of iteration in each phase.
            # - The first one handles obsolescence markers using CURRENT as
            #   precursor (successors markers of CURRENT).
            #
            #   Having multiple entries here means divergence.
            #
            # - The second one handles successors defined in each marker.
            #
            #   Having none means pruned node, multiple successors means split,
            #   single successors are standard replacement.
            #
            for mark in sorted(succmarkers[current]):
                for suc in mark[1]:
                    if suc not in cache:
                        if suc in stackedset:
                            # cycle breaking
                            cache[suc] = []
                        else:
                            # case (3) If we have not computed successors sets
                            # of one of those successors we add it to the
                            # `toproceed` stack and stop all work for this
                            # iteration.
                            toproceed.append(suc)
                            stackedset.add(suc)
                            break
                else:
                    continue
                break
            else:
                # case (4): we know all successors sets of all direct
                # successors
                #
                # Successors set contributed by each marker depends on the
                # successors sets of all its "successors" node.
                #
                # Each different marker is a divergence in the obsolescence
                # history. It contributes successors sets distinct from other
                # markers.
                #
                # Within a marker, a successor may have divergent successors
                # sets. In such a case, the marker will contribute multiple
                # divergent successors sets. If multiple successors have
                # divergent successors sets, a Cartesian product is used.
                #
                # At the end we post-process successors sets to remove
                # duplicated entry and successors set that are strict subset of
                # another one.
                succssets = []
                for mark in sorted(succmarkers[current]):
                    # successors sets contributed by this marker
                    markss = [[]]
                    for suc in mark[1]:
                        # cardinal product with previous successors
                        productresult = []
                        for prefix in markss:
                            for suffix in cache[suc]:
                                newss = list(prefix)
                                for part in suffix:
                                    # do not duplicate entries in a successors
                                    # set: first entry wins.
                                    if part not in newss:
                                        newss.append(part)
                                productresult.append(newss)
                        markss = productresult
                    succssets.extend(markss)
                # remove duplicated and subset
                seen = []
                final = []
                candidate = sorted(((set(s), s) for s in succssets if s),
                                   key=lambda x: len(x[1]), reverse=True)
                for setversion, listversion in candidate:
                    for seenset in seen:
                        if setversion.issubset(seenset):
                            break
                    else:
                        final.append(listversion)
                        seen.append(setversion)
                final.reverse() # put small successors set first
                cache[current] = final
    return cache[initialnode]
1103
1107
# mapping of 'set-name' -> <function to compute this set>
cachefuncs = {}
def cachefor(name):
    """Decorator registering a function as the computer of the <name> set."""
    def register(fn):
        # a set name may only be registered once
        assert name not in cachefuncs
        cachefuncs[name] = fn
        return fn
    return register
1113
1117
def getrevs(repo, name):
    """Return the set of revisions that belong to the <name> set.

    Such access may compute the set and cache it for future use."""
    repo = repo.unfiltered()
    if not repo.obsstore:
        # no obsolescence data at all: every set is trivially empty
        return frozenset()
    caches = repo.obsstore.caches
    if name not in caches:
        caches[name] = cachefuncs[name](repo)
    return caches[name]
1124
1128
# To be simple we need to invalidate the obsolescence cache when:
#
# - a new changeset is added
# - the public phase is changed
# - obsolescence markers are added
# - strip is used on a repo
def clearobscaches(repo):
    """Remove all obsolescence-related caches from a repo

    This clears every cache in the obsstore if the obsstore already exists
    on the repo.

    (We could be smarter here given the exact event that triggered the cache
    clearing)"""
    # only clear caches if there is obsstore data in this repo; checking
    # _filecache avoids instantiating the obsstore as a side effect
    if 'obsstore' in repo._filecache:
        repo.obsstore.caches.clear()
1142
1146
@cachefor('obsolete')
def _computeobsoleteset(repo):
    """the set of obsolete revisions"""
    getnode = repo.changelog.node
    successors = repo.obsstore.successors
    # only non-public revisions can be obsolete
    notpublic = repo._phasecache.getrevset(repo, (phases.draft, phases.secret))
    # a revision is obsolete when some marker lists it as a precursor
    return set(r for r in notpublic if getnode(r) in successors)
1153
1157
@cachefor('unstable')
def _computeunstableset(repo):
    """the set of non obsolete revisions with obsolete parents"""
    # process in ascending revision order so each parent is classified
    # before any of its children
    ctxs = sorted(repo.set('(not public()) and (not obsolete())'),
                  key=lambda c: c.rev())
    unstable = set()
    for ctx in ctxs:
        # a rev is unstable if one of its parents is obsolete or unstable
        for parent in ctx.parents():
            if parent.obsolete() or parent.rev() in unstable:
                unstable.add(ctx.rev())
                break
    return unstable
1168
1172
@cachefor('suspended')
def _computesuspendedset(repo):
    """the set of obsolete parents with non obsolete descendants"""
    ancestors = repo.changelog.ancestors(getrevs(repo, 'unstable'))
    # obsolete revisions that still have unstable descendants are suspended
    return set(r for r in getrevs(repo, 'obsolete') if r in ancestors)
1174
1178
@cachefor('extinct')
def _computeextinctset(repo):
    """the set of obsolete parents without non obsolete descendants"""
    obsoleterevs = getrevs(repo, 'obsolete')
    suspendedrevs = getrevs(repo, 'suspended')
    return obsoleterevs - suspendedrevs
1179
1183
1180
1184
@cachefor('bumped')
def _computebumpedset(repo):
    """the set of revs trying to obsolete public revisions"""
    bumped = set()
    # hoist attribute lookups out of the loop
    phase = repo._phasecache.phase # would be faster to grab the full list
    public = phases.public
    torev = repo.changelog.nodemap.get
    # We only evaluate mutable, non-obsolete revisions
    for ctx in repo.set('(not public()) and (not obsolete())'):
        node = ctx.node()
        # (future) A cache of precursors may be worthwhile if split is very
        # common
        for pnode in allprecursors(repo.obsstore, [node],
                                   ignoreflags=bumpedfix):
            prev = torev(pnode) # unfiltered! but so is phasecache
            if prev is not None and phase(repo, prev) <= public:
                # we have a public precursor
                bumped.add(ctx.rev())
                break # Next draft!
    return bumped
1203
1207
@cachefor('divergent')
def _computedivergentset(repo):
    """the set of rev that compete to be the final successors of some revision.
    """
    divergent = set()
    obsstore = repo.obsstore
    # shared successorssets() cache, reused across all iterations
    newermap = {}
    for ctx in repo.set('(not public()) - obsolete()'):
        # walk the precursor markers of this rev, transitively
        mark = obsstore.precursors.get(ctx.node(), ())
        toprocess = set(mark)
        seen = set()
        while toprocess:
            prec = toprocess.pop()[0]
            if prec in seen:
                continue # emergency cycle hanging prevention
            seen.add(prec)
            if prec not in newermap:
                # populates newermap[prec] as a side effect
                successorssets(repo, prec, newermap)
            newer = [n for n in newermap[prec] if n]
            if len(newer) > 1:
                # some precursor has more than one non-empty successors set:
                # this rev competes with another one
                divergent.add(ctx.rev())
                break
            toprocess.update(obsstore.precursors.get(prec, ()))
    return divergent
1228
1232
1229
1233
1230 def createmarkers(repo, relations, flag=0, date=None, metadata=None,
1234 def createmarkers(repo, relations, flag=0, date=None, metadata=None,
1231 operation=None):
1235 operation=None):
1232 """Add obsolete markers between changesets in a repo
1236 """Add obsolete markers between changesets in a repo
1233
1237
1234 <relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
1238 <relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
1235 tuple. `old` and `news` are changectx. metadata is an optional dictionary
1239 tuple. `old` and `news` are changectx. metadata is an optional dictionary
1236 containing metadata for this marker only. It is merged with the global
1240 containing metadata for this marker only. It is merged with the global
1237 metadata specified through the `metadata` argument of this function,
1241 metadata specified through the `metadata` argument of this function,
1238
1242
1239 Trying to obsolete a public changeset will raise an exception.
1243 Trying to obsolete a public changeset will raise an exception.
1240
1244
1241 Current user and date are used except if specified otherwise in the
1245 Current user and date are used except if specified otherwise in the
1242 metadata attribute.
1246 metadata attribute.
1243
1247
1244 This function operates within a transaction of its own, but does
1248 This function operates within a transaction of its own, but does
1245 not take any lock on the repo.
1249 not take any lock on the repo.
1246 """
1250 """
1247 # prepare metadata
1251 # prepare metadata
1248 if metadata is None:
1252 if metadata is None:
1249 metadata = {}
1253 metadata = {}
1250 if 'user' not in metadata:
1254 if 'user' not in metadata:
1251 metadata['user'] = repo.ui.username()
1255 metadata['user'] = repo.ui.username()
1252 useoperation = repo.ui.configbool('experimental',
1256 useoperation = repo.ui.configbool('experimental',
1253 'evolution.track-operation',
1257 'evolution.track-operation',
1254 False)
1258 False)
1255 if useoperation and operation:
1259 if useoperation and operation:
1256 metadata['operation'] = operation
1260 metadata['operation'] = operation
1257 tr = repo.transaction('add-obsolescence-marker')
1261 tr = repo.transaction('add-obsolescence-marker')
1258 try:
1262 try:
1259 markerargs = []
1263 markerargs = []
1260 for rel in relations:
1264 for rel in relations:
1261 prec = rel[0]
1265 prec = rel[0]
1262 sucs = rel[1]
1266 sucs = rel[1]
1263 localmetadata = metadata.copy()
1267 localmetadata = metadata.copy()
1264 if 2 < len(rel):
1268 if 2 < len(rel):
1265 localmetadata.update(rel[2])
1269 localmetadata.update(rel[2])
1266
1270
1267 if not prec.mutable():
1271 if not prec.mutable():
1268 raise error.Abort(_("cannot obsolete public changeset: %s")
1272 raise error.Abort(_("cannot obsolete public changeset: %s")
1269 % prec,
1273 % prec,
1270 hint="see 'hg help phases' for details")
1274 hint="see 'hg help phases' for details")
1271 nprec = prec.node()
1275 nprec = prec.node()
1272 nsucs = tuple(s.node() for s in sucs)
1276 nsucs = tuple(s.node() for s in sucs)
1273 npare = None
1277 npare = None
1274 if not nsucs:
1278 if not nsucs:
1275 npare = tuple(p.node() for p in prec.parents())
1279 npare = tuple(p.node() for p in prec.parents())
1276 if nprec in nsucs:
1280 if nprec in nsucs:
1277 raise error.Abort(_("changeset %s cannot obsolete itself")
1281 raise error.Abort(_("changeset %s cannot obsolete itself")
1278 % prec)
1282 % prec)
1279
1283
1280 # Creating the marker causes the hidden cache to become invalid,
1284 # Creating the marker causes the hidden cache to become invalid,
1281 # which causes recomputation when we ask for prec.parents() above.
1285 # which causes recomputation when we ask for prec.parents() above.
1282 # Resulting in n^2 behavior. So let's prepare all of the args
1286 # Resulting in n^2 behavior. So let's prepare all of the args
1283 # first, then create the markers.
1287 # first, then create the markers.
1284 markerargs.append((nprec, nsucs, npare, localmetadata))
1288 markerargs.append((nprec, nsucs, npare, localmetadata))
1285
1289
1286 for args in markerargs:
1290 for args in markerargs:
1287 nprec, nsucs, npare, localmetadata = args
1291 nprec, nsucs, npare, localmetadata = args
1288 repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
1292 repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
1289 date=date, metadata=localmetadata)
1293 date=date, metadata=localmetadata,
1294 ui=repo.ui)
1290 repo.filteredrevcache.clear()
1295 repo.filteredrevcache.clear()
1291 tr.close()
1296 tr.close()
1292 finally:
1297 finally:
1293 tr.release()
1298 tr.release()
@@ -1,1315 +1,1315 b''
1 $ cat >> $HGRCPATH << EOF
1 $ cat >> $HGRCPATH << EOF
2 > [phases]
2 > [phases]
3 > # public changeset are not obsolete
3 > # public changeset are not obsolete
4 > publish=false
4 > publish=false
5 > [ui]
5 > [ui]
6 > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(troubles, ' {troubles}')}) [{tags} {bookmarks}] {desc|firstline}\n"
6 > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(troubles, ' {troubles}')}) [{tags} {bookmarks}] {desc|firstline}\n"
7 > EOF
7 > EOF
8 $ mkcommit() {
8 $ mkcommit() {
9 > echo "$1" > "$1"
9 > echo "$1" > "$1"
10 > hg add "$1"
10 > hg add "$1"
11 > hg ci -m "add $1"
11 > hg ci -m "add $1"
12 > }
12 > }
13 $ getid() {
13 $ getid() {
14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
15 > }
15 > }
16
16
17 $ cat > debugkeys.py <<EOF
17 $ cat > debugkeys.py <<EOF
18 > def reposetup(ui, repo):
18 > def reposetup(ui, repo):
19 > class debugkeysrepo(repo.__class__):
19 > class debugkeysrepo(repo.__class__):
20 > def listkeys(self, namespace):
20 > def listkeys(self, namespace):
21 > ui.write('listkeys %s\n' % (namespace,))
21 > ui.write('listkeys %s\n' % (namespace,))
22 > return super(debugkeysrepo, self).listkeys(namespace)
22 > return super(debugkeysrepo, self).listkeys(namespace)
23 >
23 >
24 > if repo.local():
24 > if repo.local():
25 > repo.__class__ = debugkeysrepo
25 > repo.__class__ = debugkeysrepo
26 > EOF
26 > EOF
27
27
28 $ hg init tmpa
28 $ hg init tmpa
29 $ cd tmpa
29 $ cd tmpa
30 $ mkcommit kill_me
30 $ mkcommit kill_me
31
31
32 Checking that the feature is properly disabled
32 Checking that the feature is properly disabled
33
33
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
35 abort: creating obsolete markers is not enabled on this repo
35 abort: creating obsolete markers is not enabled on this repo
36 [255]
36 [255]
37
37
38 Enabling it
38 Enabling it
39
39
40 $ cat >> $HGRCPATH << EOF
40 $ cat >> $HGRCPATH << EOF
41 > [experimental]
41 > [experimental]
42 > evolution=createmarkers,exchange
42 > evolution=createmarkers,exchange
43 > EOF
43 > EOF
44
44
45 Killing a single changeset without replacement
45 Killing a single changeset without replacement
46
46
47 $ hg debugobsolete 0
47 $ hg debugobsolete 0
48 abort: changeset references must be full hexadecimal node identifiers
48 abort: changeset references must be full hexadecimal node identifiers
49 [255]
49 [255]
50 $ hg debugobsolete '00'
50 $ hg debugobsolete '00'
51 abort: changeset references must be full hexadecimal node identifiers
51 abort: changeset references must be full hexadecimal node identifiers
52 [255]
52 [255]
53 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
53 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
54 $ hg debugobsolete
54 $ hg debugobsolete
55 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
55 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
56
56
57 (test that mercurial is not confused)
57 (test that mercurial is not confused)
58
58
59 $ hg up null --quiet # having 0 as parent prevents it to be hidden
59 $ hg up null --quiet # having 0 as parent prevents it to be hidden
60 $ hg tip
60 $ hg tip
61 -1:000000000000 (public) [tip ]
61 -1:000000000000 (public) [tip ]
62 $ hg up --hidden tip --quiet
62 $ hg up --hidden tip --quiet
63
63
64 Killing a single changeset with itself should fail
64 Killing a single changeset with itself should fail
65 (simple local safeguard)
65 (simple local safeguard)
66
66
67 $ hg debugobsolete `getid kill_me` `getid kill_me`
67 $ hg debugobsolete `getid kill_me` `getid kill_me`
68 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
68 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
69 [255]
69 [255]
70
70
71 $ cd ..
71 $ cd ..
72
72
73 Killing a single changeset with replacement
73 Killing a single changeset with replacement
74 (and testing the format option)
74 (and testing the format option)
75
75
76 $ hg init tmpb
76 $ hg init tmpb
77 $ cd tmpb
77 $ cd tmpb
78 $ mkcommit a
78 $ mkcommit a
79 $ mkcommit b
79 $ mkcommit b
80 $ mkcommit original_c
80 $ mkcommit original_c
81 $ hg up "desc('b')"
81 $ hg up "desc('b')"
82 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
82 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
83 $ mkcommit new_c
83 $ mkcommit new_c
84 created new head
84 created new head
85 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
85 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
86 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
86 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
87 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
87 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
88 2:245bde4270cd add original_c
88 2:245bde4270cd add original_c
89 $ hg debugrevlog -cd
89 $ hg debugrevlog -cd
90 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
90 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
91 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
91 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
92 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
92 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
93 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
93 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
94 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
94 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
95 $ hg debugobsolete
95 $ hg debugobsolete
96 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
96 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
97
97
98 (check for version number of the obsstore)
98 (check for version number of the obsstore)
99
99
100 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
100 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
101 \x00 (no-eol) (esc)
101 \x00 (no-eol) (esc)
102
102
103 do it again (it read the obsstore before adding new changeset)
103 do it again (it read the obsstore before adding new changeset)
104
104
105 $ hg up '.^'
105 $ hg up '.^'
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
107 $ mkcommit new_2_c
107 $ mkcommit new_2_c
108 created new head
108 created new head
109 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
109 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
110 $ hg debugobsolete
110 $ hg debugobsolete
111 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
111 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
112 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
112 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
113
113
114 Register two markers with a missing node
114 Register two markers with a missing node
115
115
116 $ hg up '.^'
116 $ hg up '.^'
117 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
117 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
118 $ mkcommit new_3_c
118 $ mkcommit new_3_c
119 created new head
119 created new head
120 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
120 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
121 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
121 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
122 $ hg debugobsolete
122 $ hg debugobsolete
123 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
123 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
124 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
124 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
125 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
125 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
126 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
126 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
127
127
128 Test the --index option of debugobsolete command
128 Test the --index option of debugobsolete command
129 $ hg debugobsolete --index
129 $ hg debugobsolete --index
130 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
130 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
131 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
131 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
132 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
132 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
133 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
133 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
134
134
135 Refuse pathological nullid successors
135 Refuse pathological nullid successors
136 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
136 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
137 transaction abort!
137 transaction abort!
138 rollback completed
138 rollback completed
139 abort: bad obsolescence marker detected: invalid successors nullid
139 abort: bad obsolescence marker detected: invalid successors nullid
140 [255]
140 [255]
141
141
142 Check that graphlog detect that a changeset is obsolete:
142 Check that graphlog detect that a changeset is obsolete:
143
143
144 $ hg log -G
144 $ hg log -G
145 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
145 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
146 |
146 |
147 o 1:7c3bad9141dc (draft) [ ] add b
147 o 1:7c3bad9141dc (draft) [ ] add b
148 |
148 |
149 o 0:1f0dee641bb7 (draft) [ ] add a
149 o 0:1f0dee641bb7 (draft) [ ] add a
150
150
151
151
152 check that heads does not report them
152 check that heads does not report them
153
153
154 $ hg heads
154 $ hg heads
155 5:5601fb93a350 (draft) [tip ] add new_3_c
155 5:5601fb93a350 (draft) [tip ] add new_3_c
156 $ hg heads --hidden
156 $ hg heads --hidden
157 5:5601fb93a350 (draft) [tip ] add new_3_c
157 5:5601fb93a350 (draft) [tip ] add new_3_c
158 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
158 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
159 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
159 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
160 2:245bde4270cd (draft *obsolete*) [ ] add original_c
160 2:245bde4270cd (draft *obsolete*) [ ] add original_c
161
161
162
162
163 check that summary does not report them
163 check that summary does not report them
164
164
165 $ hg init ../sink
165 $ hg init ../sink
166 $ echo '[paths]' >> .hg/hgrc
166 $ echo '[paths]' >> .hg/hgrc
167 $ echo 'default=../sink' >> .hg/hgrc
167 $ echo 'default=../sink' >> .hg/hgrc
168 $ hg summary --remote
168 $ hg summary --remote
169 parent: 5:5601fb93a350 tip
169 parent: 5:5601fb93a350 tip
170 add new_3_c
170 add new_3_c
171 branch: default
171 branch: default
172 commit: (clean)
172 commit: (clean)
173 update: (current)
173 update: (current)
174 phases: 3 draft
174 phases: 3 draft
175 remote: 3 outgoing
175 remote: 3 outgoing
176
176
177 $ hg summary --remote --hidden
177 $ hg summary --remote --hidden
178 parent: 5:5601fb93a350 tip
178 parent: 5:5601fb93a350 tip
179 add new_3_c
179 add new_3_c
180 branch: default
180 branch: default
181 commit: (clean)
181 commit: (clean)
182 update: 3 new changesets, 4 branch heads (merge)
182 update: 3 new changesets, 4 branch heads (merge)
183 phases: 6 draft
183 phases: 6 draft
184 remote: 3 outgoing
184 remote: 3 outgoing
185
185
186 check that various commands work well with filtering
186 check that various commands work well with filtering
187
187
188 $ hg tip
188 $ hg tip
189 5:5601fb93a350 (draft) [tip ] add new_3_c
189 5:5601fb93a350 (draft) [tip ] add new_3_c
190 $ hg log -r 6
190 $ hg log -r 6
191 abort: unknown revision '6'!
191 abort: unknown revision '6'!
192 [255]
192 [255]
193 $ hg log -r 4
193 $ hg log -r 4
194 abort: hidden revision '4'!
194 abort: hidden revision '4'!
195 (use --hidden to access hidden revisions)
195 (use --hidden to access hidden revisions)
196 [255]
196 [255]
197 $ hg debugrevspec 'rev(6)'
197 $ hg debugrevspec 'rev(6)'
198 $ hg debugrevspec 'rev(4)'
198 $ hg debugrevspec 'rev(4)'
199 $ hg debugrevspec 'null'
199 $ hg debugrevspec 'null'
200 -1
200 -1
201
201
202 Check that public changeset are not accounted as obsolete:
202 Check that public changeset are not accounted as obsolete:
203
203
204 $ hg --hidden phase --public 2
204 $ hg --hidden phase --public 2
205 $ hg log -G
205 $ hg log -G
206 @ 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
206 @ 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
207 |
207 |
208 | o 2:245bde4270cd (public) [ ] add original_c
208 | o 2:245bde4270cd (public) [ ] add original_c
209 |/
209 |/
210 o 1:7c3bad9141dc (public) [ ] add b
210 o 1:7c3bad9141dc (public) [ ] add b
211 |
211 |
212 o 0:1f0dee641bb7 (public) [ ] add a
212 o 0:1f0dee641bb7 (public) [ ] add a
213
213
214
214
215 And that bumped changeset are detected
215 And that bumped changeset are detected
216 --------------------------------------
216 --------------------------------------
217
217
218 If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also
218 If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also
219 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
219 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
220 the public changeset
220 the public changeset
221
221
222 $ hg log --hidden -r 'bumped()'
222 $ hg log --hidden -r 'bumped()'
223 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
223 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
224
224
225 And that we can't push bumped changeset
225 And that we can't push bumped changeset
226
226
227 $ hg push ../tmpa -r 0 --force #(make repo related)
227 $ hg push ../tmpa -r 0 --force #(make repo related)
228 pushing to ../tmpa
228 pushing to ../tmpa
229 searching for changes
229 searching for changes
230 warning: repository is unrelated
230 warning: repository is unrelated
231 adding changesets
231 adding changesets
232 adding manifests
232 adding manifests
233 adding file changes
233 adding file changes
234 added 1 changesets with 1 changes to 1 files (+1 heads)
234 added 1 changesets with 1 changes to 1 files (+1 heads)
235 $ hg push ../tmpa
235 $ hg push ../tmpa
236 pushing to ../tmpa
236 pushing to ../tmpa
237 searching for changes
237 searching for changes
238 abort: push includes bumped changeset: 5601fb93a350!
238 abort: push includes bumped changeset: 5601fb93a350!
239 [255]
239 [255]
240
240
241 Fixing "bumped" situation
241 Fixing "bumped" situation
242 We need to create a clone of 5 and add a special marker with a flag
242 We need to create a clone of 5 and add a special marker with a flag
243
243
244 $ hg summary
244 $ hg summary
245 parent: 5:5601fb93a350 tip (bumped)
245 parent: 5:5601fb93a350 tip (bumped)
246 add new_3_c
246 add new_3_c
247 branch: default
247 branch: default
248 commit: (clean)
248 commit: (clean)
249 update: 1 new changesets, 2 branch heads (merge)
249 update: 1 new changesets, 2 branch heads (merge)
250 phases: 1 draft
250 phases: 1 draft
251 bumped: 1 changesets
251 bumped: 1 changesets
252 $ hg up '5^'
252 $ hg up '5^'
253 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
253 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
254 $ hg revert -ar 5
254 $ hg revert -ar 5
255 adding new_3_c
255 adding new_3_c
256 $ hg ci -m 'add n3w_3_c'
256 $ hg ci -m 'add n3w_3_c'
257 created new head
257 created new head
258 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
258 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
259 $ hg log -r 'bumped()'
259 $ hg log -r 'bumped()'
260 $ hg log -G
260 $ hg log -G
261 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
261 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
262 |
262 |
263 | o 2:245bde4270cd (public) [ ] add original_c
263 | o 2:245bde4270cd (public) [ ] add original_c
264 |/
264 |/
265 o 1:7c3bad9141dc (public) [ ] add b
265 o 1:7c3bad9141dc (public) [ ] add b
266 |
266 |
267 o 0:1f0dee641bb7 (public) [ ] add a
267 o 0:1f0dee641bb7 (public) [ ] add a
268
268
269
269
270 $ cd ..
270 $ cd ..
271
271
272 Revision 0 is hidden
272 Revision 0 is hidden
273 --------------------
273 --------------------
274
274
275 $ hg init rev0hidden
275 $ hg init rev0hidden
276 $ cd rev0hidden
276 $ cd rev0hidden
277
277
278 $ mkcommit kill0
278 $ mkcommit kill0
279 $ hg up -q null
279 $ hg up -q null
280 $ hg debugobsolete `getid kill0`
280 $ hg debugobsolete `getid kill0`
281 $ mkcommit a
281 $ mkcommit a
282 $ mkcommit b
282 $ mkcommit b
283
283
284 Should pick the first visible revision as "repo" node
284 Should pick the first visible revision as "repo" node
285
285
286 $ hg archive ../archive-null
286 $ hg archive ../archive-null
287 $ cat ../archive-null/.hg_archival.txt
287 $ cat ../archive-null/.hg_archival.txt
288 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
288 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
289 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
289 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
290 branch: default
290 branch: default
291 latesttag: null
291 latesttag: null
292 latesttagdistance: 2
292 latesttagdistance: 2
293 changessincelatesttag: 2
293 changessincelatesttag: 2
294
294
295
295
296 $ cd ..
296 $ cd ..
297
297
298 Exchange Test
298 Exchange Test
299 ============================
299 ============================
300
300
301 Destination repo does not have any data
301 Destination repo does not have any data
302 ---------------------------------------
302 ---------------------------------------
303
303
304 Simple incoming test
304 Simple incoming test
305
305
306 $ hg init tmpc
306 $ hg init tmpc
307 $ cd tmpc
307 $ cd tmpc
308 $ hg incoming ../tmpb
308 $ hg incoming ../tmpb
309 comparing with ../tmpb
309 comparing with ../tmpb
310 0:1f0dee641bb7 (public) [ ] add a
310 0:1f0dee641bb7 (public) [ ] add a
311 1:7c3bad9141dc (public) [ ] add b
311 1:7c3bad9141dc (public) [ ] add b
312 2:245bde4270cd (public) [ ] add original_c
312 2:245bde4270cd (public) [ ] add original_c
313 6:6f9641995072 (draft) [tip ] add n3w_3_c
313 6:6f9641995072 (draft) [tip ] add n3w_3_c
314
314
315 Try to pull markers
315 Try to pull markers
316 (extinct changeset are excluded but marker are pushed)
316 (extinct changeset are excluded but marker are pushed)
317
317
318 $ hg pull ../tmpb
318 $ hg pull ../tmpb
319 pulling from ../tmpb
319 pulling from ../tmpb
320 requesting all changes
320 requesting all changes
321 adding changesets
321 adding changesets
322 adding manifests
322 adding manifests
323 adding file changes
323 adding file changes
324 added 4 changesets with 4 changes to 4 files (+1 heads)
324 added 4 changesets with 4 changes to 4 files (+1 heads)
325 5 new obsolescence markers
325 5 new obsolescence markers
326 (run 'hg heads' to see heads, 'hg merge' to merge)
326 (run 'hg heads' to see heads, 'hg merge' to merge)
327 $ hg debugobsolete
327 $ hg debugobsolete
328 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
328 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
329 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
329 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
330 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
330 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
331 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
331 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
332 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
332 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
333
333
334 Rollback//Transaction support
334 Rollback//Transaction support
335
335
336 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
336 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
337 $ hg debugobsolete
337 $ hg debugobsolete
338 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
338 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
339 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
339 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
340 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
340 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
341 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
341 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
342 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
342 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
343 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
343 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
344 $ hg rollback -n
344 $ hg rollback -n
345 repository tip rolled back to revision 3 (undo debugobsolete)
345 repository tip rolled back to revision 3 (undo debugobsolete)
346 $ hg rollback
346 $ hg rollback
347 repository tip rolled back to revision 3 (undo debugobsolete)
347 repository tip rolled back to revision 3 (undo debugobsolete)
348 $ hg debugobsolete
348 $ hg debugobsolete
349 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
349 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
350 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
350 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
351 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
351 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
352 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
352 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
353 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
353 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
354
354
355 $ cd ..
355 $ cd ..
356
356
357 Try to push markers
357 Try to push markers
358
358
359 $ hg init tmpd
359 $ hg init tmpd
360 $ hg -R tmpb push tmpd
360 $ hg -R tmpb push tmpd
361 pushing to tmpd
361 pushing to tmpd
362 searching for changes
362 searching for changes
363 adding changesets
363 adding changesets
364 adding manifests
364 adding manifests
365 adding file changes
365 adding file changes
366 added 4 changesets with 4 changes to 4 files (+1 heads)
366 added 4 changesets with 4 changes to 4 files (+1 heads)
367 5 new obsolescence markers
367 5 new obsolescence markers
368 $ hg -R tmpd debugobsolete | sort
368 $ hg -R tmpd debugobsolete | sort
369 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
369 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
370 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
370 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
371 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
371 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
372 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
372 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
373 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
373 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
374
374
375 Check obsolete keys are exchanged only if source has an obsolete store
375 Check obsolete keys are exchanged only if source has an obsolete store
376
376
377 $ hg init empty
377 $ hg init empty
378 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
378 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
379 pushing to tmpd
379 pushing to tmpd
380 listkeys phases
380 listkeys phases
381 listkeys bookmarks
381 listkeys bookmarks
382 no changes found
382 no changes found
383 listkeys phases
383 listkeys phases
384 [1]
384 [1]
385
385
386 clone support
386 clone support
387 (markers are copied and extinct changesets are included to allow hardlinks)
387 (markers are copied and extinct changesets are included to allow hardlinks)
388
388
389 $ hg clone tmpb clone-dest
389 $ hg clone tmpb clone-dest
390 updating to branch default
390 updating to branch default
391 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
391 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
392 $ hg -R clone-dest log -G --hidden
392 $ hg -R clone-dest log -G --hidden
393 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
393 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
394 |
394 |
395 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c
395 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c
396 |/
396 |/
397 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
397 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
398 |/
398 |/
399 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
399 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
400 |/
400 |/
401 | o 2:245bde4270cd (public) [ ] add original_c
401 | o 2:245bde4270cd (public) [ ] add original_c
402 |/
402 |/
403 o 1:7c3bad9141dc (public) [ ] add b
403 o 1:7c3bad9141dc (public) [ ] add b
404 |
404 |
405 o 0:1f0dee641bb7 (public) [ ] add a
405 o 0:1f0dee641bb7 (public) [ ] add a
406
406
407 $ hg -R clone-dest debugobsolete
407 $ hg -R clone-dest debugobsolete
408 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
408 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
409 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
409 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
410 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
410 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
411 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
411 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
412 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
412 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
413
413
414
414
415 Destination repo have existing data
415 Destination repo have existing data
416 ---------------------------------------
416 ---------------------------------------
417
417
418 On pull
418 On pull
419
419
420 $ hg init tmpe
420 $ hg init tmpe
421 $ cd tmpe
421 $ cd tmpe
422 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
422 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
423 $ hg pull ../tmpb
423 $ hg pull ../tmpb
424 pulling from ../tmpb
424 pulling from ../tmpb
425 requesting all changes
425 requesting all changes
426 adding changesets
426 adding changesets
427 adding manifests
427 adding manifests
428 adding file changes
428 adding file changes
429 added 4 changesets with 4 changes to 4 files (+1 heads)
429 added 4 changesets with 4 changes to 4 files (+1 heads)
430 5 new obsolescence markers
430 5 new obsolescence markers
431 (run 'hg heads' to see heads, 'hg merge' to merge)
431 (run 'hg heads' to see heads, 'hg merge' to merge)
432 $ hg debugobsolete
432 $ hg debugobsolete
433 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
433 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
434 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
434 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
435 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
435 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
436 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
436 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
437 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
437 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
438 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
438 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
439
439
440
440
441 On push
441 On push
442
442
443 $ hg push ../tmpc
443 $ hg push ../tmpc
444 pushing to ../tmpc
444 pushing to ../tmpc
445 searching for changes
445 searching for changes
446 no changes found
446 no changes found
447 1 new obsolescence markers
447 1 new obsolescence markers
448 [1]
448 [1]
449 $ hg -R ../tmpc debugobsolete
449 $ hg -R ../tmpc debugobsolete
450 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
450 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
451 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
451 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
452 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
452 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
453 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
453 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
454 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
454 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
455 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
455 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
456
456
457 detect outgoing obsolete and unstable
457 detect outgoing obsolete and unstable
458 ---------------------------------------
458 ---------------------------------------
459
459
460
460
461 $ hg log -G
461 $ hg log -G
462 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
462 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
463 |
463 |
464 | o 2:245bde4270cd (public) [ ] add original_c
464 | o 2:245bde4270cd (public) [ ] add original_c
465 |/
465 |/
466 o 1:7c3bad9141dc (public) [ ] add b
466 o 1:7c3bad9141dc (public) [ ] add b
467 |
467 |
468 o 0:1f0dee641bb7 (public) [ ] add a
468 o 0:1f0dee641bb7 (public) [ ] add a
469
469
470 $ hg up 'desc("n3w_3_c")'
470 $ hg up 'desc("n3w_3_c")'
471 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
471 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
472 $ mkcommit original_d
472 $ mkcommit original_d
473 $ mkcommit original_e
473 $ mkcommit original_e
474 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
474 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
475 $ hg debugobsolete | grep `getid original_d`
475 $ hg debugobsolete | grep `getid original_d`
476 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
476 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
477 $ hg log -r 'obsolete()'
477 $ hg log -r 'obsolete()'
478 4:94b33453f93b (draft *obsolete*) [ ] add original_d
478 4:94b33453f93b (draft *obsolete*) [ ] add original_d
479 $ hg summary
479 $ hg summary
480 parent: 5:cda648ca50f5 tip (unstable)
480 parent: 5:cda648ca50f5 tip (unstable)
481 add original_e
481 add original_e
482 branch: default
482 branch: default
483 commit: (clean)
483 commit: (clean)
484 update: 1 new changesets, 2 branch heads (merge)
484 update: 1 new changesets, 2 branch heads (merge)
485 phases: 3 draft
485 phases: 3 draft
486 unstable: 1 changesets
486 unstable: 1 changesets
487 $ hg log -G -r '::unstable()'
487 $ hg log -G -r '::unstable()'
488 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
488 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
489 |
489 |
490 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
490 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
491 |
491 |
492 o 3:6f9641995072 (draft) [ ] add n3w_3_c
492 o 3:6f9641995072 (draft) [ ] add n3w_3_c
493 |
493 |
494 o 1:7c3bad9141dc (public) [ ] add b
494 o 1:7c3bad9141dc (public) [ ] add b
495 |
495 |
496 o 0:1f0dee641bb7 (public) [ ] add a
496 o 0:1f0dee641bb7 (public) [ ] add a
497
497
498
498
499 refuse to push obsolete changeset
499 refuse to push obsolete changeset
500
500
501 $ hg push ../tmpc/ -r 'desc("original_d")'
501 $ hg push ../tmpc/ -r 'desc("original_d")'
502 pushing to ../tmpc/
502 pushing to ../tmpc/
503 searching for changes
503 searching for changes
504 abort: push includes obsolete changeset: 94b33453f93b!
504 abort: push includes obsolete changeset: 94b33453f93b!
505 [255]
505 [255]
506
506
507 refuse to push unstable changeset
507 refuse to push unstable changeset
508
508
509 $ hg push ../tmpc/
509 $ hg push ../tmpc/
510 pushing to ../tmpc/
510 pushing to ../tmpc/
511 searching for changes
511 searching for changes
512 abort: push includes unstable changeset: cda648ca50f5!
512 abort: push includes unstable changeset: cda648ca50f5!
513 [255]
513 [255]
514
514
515 Test that extinct changeset are properly detected
515 Test that extinct changeset are properly detected
516
516
517 $ hg log -r 'extinct()'
517 $ hg log -r 'extinct()'
518
518
519 Don't try to push extinct changeset
519 Don't try to push extinct changeset
520
520
521 $ hg init ../tmpf
521 $ hg init ../tmpf
522 $ hg out ../tmpf
522 $ hg out ../tmpf
523 comparing with ../tmpf
523 comparing with ../tmpf
524 searching for changes
524 searching for changes
525 0:1f0dee641bb7 (public) [ ] add a
525 0:1f0dee641bb7 (public) [ ] add a
526 1:7c3bad9141dc (public) [ ] add b
526 1:7c3bad9141dc (public) [ ] add b
527 2:245bde4270cd (public) [ ] add original_c
527 2:245bde4270cd (public) [ ] add original_c
528 3:6f9641995072 (draft) [ ] add n3w_3_c
528 3:6f9641995072 (draft) [ ] add n3w_3_c
529 4:94b33453f93b (draft *obsolete*) [ ] add original_d
529 4:94b33453f93b (draft *obsolete*) [ ] add original_d
530 5:cda648ca50f5 (draft unstable) [tip ] add original_e
530 5:cda648ca50f5 (draft unstable) [tip ] add original_e
531 $ hg push ../tmpf -f # -f because be push unstable too
531 $ hg push ../tmpf -f # -f because be push unstable too
532 pushing to ../tmpf
532 pushing to ../tmpf
533 searching for changes
533 searching for changes
534 adding changesets
534 adding changesets
535 adding manifests
535 adding manifests
536 adding file changes
536 adding file changes
537 added 6 changesets with 6 changes to 6 files (+1 heads)
537 added 6 changesets with 6 changes to 6 files (+1 heads)
538 7 new obsolescence markers
538 7 new obsolescence markers
539
539
540 no warning displayed
540 no warning displayed
541
541
542 $ hg push ../tmpf
542 $ hg push ../tmpf
543 pushing to ../tmpf
543 pushing to ../tmpf
544 searching for changes
544 searching for changes
545 no changes found
545 no changes found
546 [1]
546 [1]
547
547
548 Do not warn about new head when the new head is a successors of a remote one
548 Do not warn about new head when the new head is a successors of a remote one
549
549
550 $ hg log -G
550 $ hg log -G
551 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
551 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
552 |
552 |
553 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
553 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
554 |
554 |
555 o 3:6f9641995072 (draft) [ ] add n3w_3_c
555 o 3:6f9641995072 (draft) [ ] add n3w_3_c
556 |
556 |
557 | o 2:245bde4270cd (public) [ ] add original_c
557 | o 2:245bde4270cd (public) [ ] add original_c
558 |/
558 |/
559 o 1:7c3bad9141dc (public) [ ] add b
559 o 1:7c3bad9141dc (public) [ ] add b
560 |
560 |
561 o 0:1f0dee641bb7 (public) [ ] add a
561 o 0:1f0dee641bb7 (public) [ ] add a
562
562
563 $ hg up -q 'desc(n3w_3_c)'
563 $ hg up -q 'desc(n3w_3_c)'
564 $ mkcommit obsolete_e
564 $ mkcommit obsolete_e
565 created new head
565 created new head
566 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
566 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
567 $ hg outgoing ../tmpf # parasite hg outgoing testin
567 $ hg outgoing ../tmpf # parasite hg outgoing testin
568 comparing with ../tmpf
568 comparing with ../tmpf
569 searching for changes
569 searching for changes
570 6:3de5eca88c00 (draft) [tip ] add obsolete_e
570 6:3de5eca88c00 (draft) [tip ] add obsolete_e
571 $ hg push ../tmpf
571 $ hg push ../tmpf
572 pushing to ../tmpf
572 pushing to ../tmpf
573 searching for changes
573 searching for changes
574 adding changesets
574 adding changesets
575 adding manifests
575 adding manifests
576 adding file changes
576 adding file changes
577 added 1 changesets with 1 changes to 1 files (+1 heads)
577 added 1 changesets with 1 changes to 1 files (+1 heads)
578 1 new obsolescence markers
578 1 new obsolescence markers
579
579
580 test relevance computation
580 test relevance computation
581 ---------------------------------------
581 ---------------------------------------
582
582
583 Checking simple case of "marker relevance".
583 Checking simple case of "marker relevance".
584
584
585
585
586 Reminder of the repo situation
586 Reminder of the repo situation
587
587
588 $ hg log --hidden --graph
588 $ hg log --hidden --graph
589 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
589 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
590 |
590 |
591 | x 5:cda648ca50f5 (draft *obsolete*) [ ] add original_e
591 | x 5:cda648ca50f5 (draft *obsolete*) [ ] add original_e
592 | |
592 | |
593 | x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
593 | x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
594 |/
594 |/
595 o 3:6f9641995072 (draft) [ ] add n3w_3_c
595 o 3:6f9641995072 (draft) [ ] add n3w_3_c
596 |
596 |
597 | o 2:245bde4270cd (public) [ ] add original_c
597 | o 2:245bde4270cd (public) [ ] add original_c
598 |/
598 |/
599 o 1:7c3bad9141dc (public) [ ] add b
599 o 1:7c3bad9141dc (public) [ ] add b
600 |
600 |
601 o 0:1f0dee641bb7 (public) [ ] add a
601 o 0:1f0dee641bb7 (public) [ ] add a
602
602
603
603
604 List of all markers
604 List of all markers
605
605
606 $ hg debugobsolete
606 $ hg debugobsolete
607 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
607 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
608 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
608 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
609 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
609 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
610 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
610 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
611 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
611 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
612 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
612 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
613 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
613 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
614 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
614 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
615
615
616 List of changesets with no chain
616 List of changesets with no chain
617
617
618 $ hg debugobsolete --hidden --rev ::2
618 $ hg debugobsolete --hidden --rev ::2
619
619
620 List of changesets that are included on marker chain
620 List of changesets that are included on marker chain
621
621
622 $ hg debugobsolete --hidden --rev 6
622 $ hg debugobsolete --hidden --rev 6
623 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
623 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
624
624
625 List of changesets with a longer chain, (including a pruned children)
625 List of changesets with a longer chain, (including a pruned children)
626
626
627 $ hg debugobsolete --hidden --rev 3
627 $ hg debugobsolete --hidden --rev 3
628 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
628 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
629 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
629 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
630 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
630 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
631 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
631 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
632 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
632 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
633 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
633 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
634 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
634 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
635
635
636 List of both
636 List of both
637
637
638 $ hg debugobsolete --hidden --rev 3::6
638 $ hg debugobsolete --hidden --rev 3::6
639 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
639 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
640 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
640 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
641 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
641 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
642 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
642 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
643 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
643 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
644 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
644 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
645 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
645 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
646 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
646 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
647
647
648 List of all markers in JSON
648 List of all markers in JSON
649
649
650 $ hg debugobsolete -Tjson
650 $ hg debugobsolete -Tjson
651 [
651 [
652 {
652 {
653 "date": [1339.0, 0],
653 "date": [1339.0, 0],
654 "flag": 0,
654 "flag": 0,
655 "metadata": {"user": "test"},
655 "metadata": {"user": "test"},
656 "precnode": "1339133913391339133913391339133913391339",
656 "precnode": "1339133913391339133913391339133913391339",
657 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
657 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
658 },
658 },
659 {
659 {
660 "date": [1339.0, 0],
660 "date": [1339.0, 0],
661 "flag": 0,
661 "flag": 0,
662 "metadata": {"user": "test"},
662 "metadata": {"user": "test"},
663 "precnode": "1337133713371337133713371337133713371337",
663 "precnode": "1337133713371337133713371337133713371337",
664 "succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
664 "succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
665 },
665 },
666 {
666 {
667 "date": [121.0, 120],
667 "date": [121.0, 120],
668 "flag": 12,
668 "flag": 12,
669 "metadata": {"user": "test"},
669 "metadata": {"user": "test"},
670 "precnode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
670 "precnode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
671 "succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
671 "succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
672 },
672 },
673 {
673 {
674 "date": [1338.0, 0],
674 "date": [1338.0, 0],
675 "flag": 1,
675 "flag": 1,
676 "metadata": {"user": "test"},
676 "metadata": {"user": "test"},
677 "precnode": "5601fb93a350734d935195fee37f4054c529ff39",
677 "precnode": "5601fb93a350734d935195fee37f4054c529ff39",
678 "succnodes": ["6f96419950729f3671185b847352890f074f7557"]
678 "succnodes": ["6f96419950729f3671185b847352890f074f7557"]
679 },
679 },
680 {
680 {
681 "date": [1338.0, 0],
681 "date": [1338.0, 0],
682 "flag": 0,
682 "flag": 0,
683 "metadata": {"user": "test"},
683 "metadata": {"user": "test"},
684 "precnode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
684 "precnode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
685 "succnodes": ["1337133713371337133713371337133713371337"]
685 "succnodes": ["1337133713371337133713371337133713371337"]
686 },
686 },
687 {
687 {
688 "date": [1337.0, 0],
688 "date": [1337.0, 0],
689 "flag": 0,
689 "flag": 0,
690 "metadata": {"user": "test"},
690 "metadata": {"user": "test"},
691 "precnode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
691 "precnode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
692 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
692 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
693 },
693 },
694 {
694 {
695 "date": [0.0, 0],
695 "date": [0.0, 0],
696 "flag": 0,
696 "flag": 0,
697 "metadata": {"user": "test"},
697 "metadata": {"user": "test"},
698 "parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
698 "parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
699 "precnode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
699 "precnode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
700 "succnodes": []
700 "succnodes": []
701 },
701 },
702 {
702 {
703 "date": *, (glob)
703 "date": *, (glob)
704 "flag": 0,
704 "flag": 0,
705 "metadata": {"user": "test"},
705 "metadata": {"user": "test"},
706 "precnode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
706 "precnode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
707 "succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
707 "succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
708 }
708 }
709 ]
709 ]
710
710
711 Template keywords
711 Template keywords
712
712
713 $ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
713 $ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
714 3de5eca88c00 ????-??-?? (glob)
714 3de5eca88c00 ????-??-?? (glob)
715 $ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
715 $ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
716 user=test
716 user=test
717 $ hg debugobsolete -r6 -T '{metadata}\n'
717 $ hg debugobsolete -r6 -T '{metadata}\n'
718 'user': 'test'
718 'user': 'test'
719 $ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
719 $ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
720 0 test
720 0 test
721
721
722 Test the debug output for exchange
722 Test the debug output for exchange
723 ----------------------------------
723 ----------------------------------
724
724
725 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' # bundle2
725 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' # bundle2
726 pulling from ../tmpb
726 pulling from ../tmpb
727 searching for changes
727 searching for changes
728 no changes found
728 no changes found
729 obsmarker-exchange: 346 bytes received
729 obsmarker-exchange: 346 bytes received
730
730
731 check hgweb does not explode
731 check hgweb does not explode
732 ====================================
732 ====================================
733
733
734 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
734 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
735 adding changesets
735 adding changesets
736 adding manifests
736 adding manifests
737 adding file changes
737 adding file changes
738 added 62 changesets with 63 changes to 9 files (+60 heads)
738 added 62 changesets with 63 changes to 9 files (+60 heads)
739 (run 'hg heads .' to see heads, 'hg merge' to merge)
739 (run 'hg heads .' to see heads, 'hg merge' to merge)
740 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
740 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
741 > do
741 > do
742 > hg debugobsolete $node
742 > hg debugobsolete $node
743 > done
743 > done
744 $ hg up tip
744 $ hg up tip
745 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
745 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
746
746
747 #if serve
747 #if serve
748
748
749 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
749 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
750 $ cat hg.pid >> $DAEMON_PIDS
750 $ cat hg.pid >> $DAEMON_PIDS
751
751
752 check changelog view
752 check changelog view
753
753
754 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
754 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
755 200 Script output follows
755 200 Script output follows
756
756
757 check graph view
757 check graph view
758
758
759 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
759 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
760 200 Script output follows
760 200 Script output follows
761
761
762 check filelog view
762 check filelog view
763
763
764 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
764 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
765 200 Script output follows
765 200 Script output follows
766
766
767 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
767 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
768 200 Script output follows
768 200 Script output follows
769 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
769 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
770 404 Not Found
770 404 Not Found
771 [1]
771 [1]
772
772
773 check that web.view config option:
773 check that web.view config option:
774
774
775 $ killdaemons.py hg.pid
775 $ killdaemons.py hg.pid
776 $ cat >> .hg/hgrc << EOF
776 $ cat >> .hg/hgrc << EOF
777 > [web]
777 > [web]
778 > view=all
778 > view=all
779 > EOF
779 > EOF
780 $ wait
780 $ wait
781 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
781 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
782 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
782 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
783 200 Script output follows
783 200 Script output follows
784 $ killdaemons.py hg.pid
784 $ killdaemons.py hg.pid
785
785
786 Checking _enable=False warning if obsolete marker exists
786 Checking _enable=False warning if obsolete marker exists
787
787
788 $ echo '[experimental]' >> $HGRCPATH
788 $ echo '[experimental]' >> $HGRCPATH
789 $ echo "evolution=" >> $HGRCPATH
789 $ echo "evolution=" >> $HGRCPATH
790 $ hg log -r tip
790 $ hg log -r tip
791 obsolete feature not enabled but 68 markers found!
791 obsolete feature not enabled but 68 markers found!
792 68:c15e9edfca13 (draft) [tip ] add celestine
792 68:c15e9edfca13 (draft) [tip ] add celestine
793
793
794 reenable for later test
794 reenable for later test
795
795
796 $ echo '[experimental]' >> $HGRCPATH
796 $ echo '[experimental]' >> $HGRCPATH
797 $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
797 $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
798
798
799 $ rm hg.pid access.log errors.log
799 $ rm hg.pid access.log errors.log
800 #endif
800 #endif
801
801
802 Several troubles on the same changeset (create an unstable and bumped changeset)
802 Several troubles on the same changeset (create an unstable and bumped changeset)
803
803
804 $ hg debugobsolete `getid obsolete_e`
804 $ hg debugobsolete `getid obsolete_e`
805 $ hg debugobsolete `getid original_c` `getid babar`
805 $ hg debugobsolete `getid original_c` `getid babar`
806 $ hg log --config ui.logtemplate= -r 'bumped() and unstable()'
806 $ hg log --config ui.logtemplate= -r 'bumped() and unstable()'
807 changeset: 7:50c51b361e60
807 changeset: 7:50c51b361e60
808 user: test
808 user: test
809 date: Thu Jan 01 00:00:00 1970 +0000
809 date: Thu Jan 01 00:00:00 1970 +0000
810 trouble: unstable, bumped
810 trouble: unstable, bumped
811 summary: add babar
811 summary: add babar
812
812
813
813
814 test the "obsolete" templatekw
814 test the "obsolete" templatekw
815
815
816 $ hg log -r 'obsolete()'
816 $ hg log -r 'obsolete()'
817 6:3de5eca88c00 (draft *obsolete*) [ ] add obsolete_e
817 6:3de5eca88c00 (draft *obsolete*) [ ] add obsolete_e
818
818
819 test the "troubles" templatekw
819 test the "troubles" templatekw
820
820
821 $ hg log -r 'bumped() and unstable()'
821 $ hg log -r 'bumped() and unstable()'
822 7:50c51b361e60 (draft unstable bumped) [ ] add babar
822 7:50c51b361e60 (draft unstable bumped) [ ] add babar
823
823
824 test the default cmdline template
824 test the default cmdline template
825
825
826 $ hg log -T default -r 'bumped()'
826 $ hg log -T default -r 'bumped()'
827 changeset: 7:50c51b361e60
827 changeset: 7:50c51b361e60
828 user: test
828 user: test
829 date: Thu Jan 01 00:00:00 1970 +0000
829 date: Thu Jan 01 00:00:00 1970 +0000
830 trouble: unstable, bumped
830 trouble: unstable, bumped
831 summary: add babar
831 summary: add babar
832
832
833 $ hg log -T default -r 'obsolete()'
833 $ hg log -T default -r 'obsolete()'
834 changeset: 6:3de5eca88c00
834 changeset: 6:3de5eca88c00
835 parent: 3:6f9641995072
835 parent: 3:6f9641995072
836 user: test
836 user: test
837 date: Thu Jan 01 00:00:00 1970 +0000
837 date: Thu Jan 01 00:00:00 1970 +0000
838 summary: add obsolete_e
838 summary: add obsolete_e
839
839
840
840
841 test summary output
841 test summary output
842
842
843 $ hg up -r 'bumped() and unstable()'
843 $ hg up -r 'bumped() and unstable()'
844 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
844 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
845 $ hg summary
845 $ hg summary
846 parent: 7:50c51b361e60 (unstable, bumped)
846 parent: 7:50c51b361e60 (unstable, bumped)
847 add babar
847 add babar
848 branch: default
848 branch: default
849 commit: (clean)
849 commit: (clean)
850 update: 2 new changesets (update)
850 update: 2 new changesets (update)
851 phases: 4 draft
851 phases: 4 draft
852 unstable: 2 changesets
852 unstable: 2 changesets
853 bumped: 1 changesets
853 bumped: 1 changesets
854 $ hg up -r 'obsolete()'
854 $ hg up -r 'obsolete()'
855 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
855 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
856 $ hg summary
856 $ hg summary
857 parent: 6:3de5eca88c00 (obsolete)
857 parent: 6:3de5eca88c00 (obsolete)
858 add obsolete_e
858 add obsolete_e
859 branch: default
859 branch: default
860 commit: (clean)
860 commit: (clean)
861 update: 3 new changesets (update)
861 update: 3 new changesets (update)
862 phases: 4 draft
862 phases: 4 draft
863 unstable: 2 changesets
863 unstable: 2 changesets
864 bumped: 1 changesets
864 bumped: 1 changesets
865
865
866 Test incoming/outcoming with changesets obsoleted remotely, known locally
866 Test incoming/outcoming with changesets obsoleted remotely, known locally
867 ===============================================================================
867 ===============================================================================
868
868
869 This test issue 3805
869 This test issue 3805
870
870
871 $ hg init repo-issue3805
871 $ hg init repo-issue3805
872 $ cd repo-issue3805
872 $ cd repo-issue3805
873 $ echo "base" > base
873 $ echo "base" > base
874 $ hg ci -Am "base"
874 $ hg ci -Am "base"
875 adding base
875 adding base
876 $ echo "foo" > foo
876 $ echo "foo" > foo
877 $ hg ci -Am "A"
877 $ hg ci -Am "A"
878 adding foo
878 adding foo
879 $ hg clone . ../other-issue3805
879 $ hg clone . ../other-issue3805
880 updating to branch default
880 updating to branch default
881 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
881 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
882 $ echo "bar" >> foo
882 $ echo "bar" >> foo
883 $ hg ci --amend
883 $ hg ci --amend
884 $ cd ../other-issue3805
884 $ cd ../other-issue3805
885 $ hg log -G
885 $ hg log -G
886 @ 1:29f0c6921ddd (draft) [tip ] A
886 @ 1:29f0c6921ddd (draft) [tip ] A
887 |
887 |
888 o 0:d20a80d4def3 (draft) [ ] base
888 o 0:d20a80d4def3 (draft) [ ] base
889
889
890 $ hg log -G -R ../repo-issue3805
890 $ hg log -G -R ../repo-issue3805
891 @ 3:323a9c3ddd91 (draft) [tip ] A
891 @ 3:323a9c3ddd91 (draft) [tip ] A
892 |
892 |
893 o 0:d20a80d4def3 (draft) [ ] base
893 o 0:d20a80d4def3 (draft) [ ] base
894
894
895 $ hg incoming
895 $ hg incoming
896 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
896 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
897 searching for changes
897 searching for changes
898 3:323a9c3ddd91 (draft) [tip ] A
898 3:323a9c3ddd91 (draft) [tip ] A
899 $ hg incoming --bundle ../issue3805.hg
899 $ hg incoming --bundle ../issue3805.hg
900 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
900 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
901 searching for changes
901 searching for changes
902 3:323a9c3ddd91 (draft) [tip ] A
902 3:323a9c3ddd91 (draft) [tip ] A
903 $ hg outgoing
903 $ hg outgoing
904 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
904 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
905 searching for changes
905 searching for changes
906 1:29f0c6921ddd (draft) [tip ] A
906 1:29f0c6921ddd (draft) [tip ] A
907
907
908 #if serve
908 #if serve
909
909
910 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
910 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
911 $ cat hg.pid >> $DAEMON_PIDS
911 $ cat hg.pid >> $DAEMON_PIDS
912
912
913 $ hg incoming http://localhost:$HGPORT
913 $ hg incoming http://localhost:$HGPORT
914 comparing with http://localhost:$HGPORT/
914 comparing with http://localhost:$HGPORT/
915 searching for changes
915 searching for changes
916 2:323a9c3ddd91 (draft) [tip ] A
916 2:323a9c3ddd91 (draft) [tip ] A
917 $ hg outgoing http://localhost:$HGPORT
917 $ hg outgoing http://localhost:$HGPORT
918 comparing with http://localhost:$HGPORT/
918 comparing with http://localhost:$HGPORT/
919 searching for changes
919 searching for changes
920 1:29f0c6921ddd (draft) [tip ] A
920 1:29f0c6921ddd (draft) [tip ] A
921
921
922 $ killdaemons.py
922 $ killdaemons.py
923
923
924 #endif
924 #endif
925
925
926 This test issue 3814
926 This test issue 3814
927
927
928 (nothing to push but locally hidden changeset)
928 (nothing to push but locally hidden changeset)
929
929
930 $ cd ..
930 $ cd ..
931 $ hg init repo-issue3814
931 $ hg init repo-issue3814
932 $ cd repo-issue3805
932 $ cd repo-issue3805
933 $ hg push -r 323a9c3ddd91 ../repo-issue3814
933 $ hg push -r 323a9c3ddd91 ../repo-issue3814
934 pushing to ../repo-issue3814
934 pushing to ../repo-issue3814
935 searching for changes
935 searching for changes
936 adding changesets
936 adding changesets
937 adding manifests
937 adding manifests
938 adding file changes
938 adding file changes
939 added 2 changesets with 2 changes to 2 files
939 added 2 changesets with 2 changes to 2 files
940 2 new obsolescence markers
940 2 new obsolescence markers
941 $ hg out ../repo-issue3814
941 $ hg out ../repo-issue3814
942 comparing with ../repo-issue3814
942 comparing with ../repo-issue3814
943 searching for changes
943 searching for changes
944 no changes found
944 no changes found
945 [1]
945 [1]
946
946
947 Test that a local tag blocks a changeset from being hidden
947 Test that a local tag blocks a changeset from being hidden
948
948
949 $ hg tag -l visible -r 1 --hidden
949 $ hg tag -l visible -r 1 --hidden
950 $ hg log -G
950 $ hg log -G
951 @ 3:323a9c3ddd91 (draft) [tip ] A
951 @ 3:323a9c3ddd91 (draft) [tip ] A
952 |
952 |
953 | x 1:29f0c6921ddd (draft *obsolete*) [visible ] A
953 | x 1:29f0c6921ddd (draft *obsolete*) [visible ] A
954 |/
954 |/
955 o 0:d20a80d4def3 (draft) [ ] base
955 o 0:d20a80d4def3 (draft) [ ] base
956
956
957 Test that removing a local tag does not cause some commands to fail
957 Test that removing a local tag does not cause some commands to fail
958
958
959 $ hg tag -l -r tip tiptag
959 $ hg tag -l -r tip tiptag
960 $ hg tags
960 $ hg tags
961 tiptag 3:323a9c3ddd91
961 tiptag 3:323a9c3ddd91
962 tip 3:323a9c3ddd91
962 tip 3:323a9c3ddd91
963 visible 1:29f0c6921ddd
963 visible 1:29f0c6921ddd
964 $ hg --config extensions.strip= strip -r tip --no-backup
964 $ hg --config extensions.strip= strip -r tip --no-backup
965 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
965 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
966 $ hg tags
966 $ hg tags
967 visible 1:29f0c6921ddd
967 visible 1:29f0c6921ddd
968 tip 1:29f0c6921ddd
968 tip 1:29f0c6921ddd
969
969
970 Test bundle overlay onto hidden revision
970 Test bundle overlay onto hidden revision
971
971
972 $ cd ..
972 $ cd ..
973 $ hg init repo-bundleoverlay
973 $ hg init repo-bundleoverlay
974 $ cd repo-bundleoverlay
974 $ cd repo-bundleoverlay
975 $ echo "A" > foo
975 $ echo "A" > foo
976 $ hg ci -Am "A"
976 $ hg ci -Am "A"
977 adding foo
977 adding foo
978 $ echo "B" >> foo
978 $ echo "B" >> foo
979 $ hg ci -m "B"
979 $ hg ci -m "B"
980 $ hg up 0
980 $ hg up 0
981 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
981 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
982 $ echo "C" >> foo
982 $ echo "C" >> foo
983 $ hg ci -m "C"
983 $ hg ci -m "C"
984 created new head
984 created new head
985 $ hg log -G
985 $ hg log -G
986 @ 2:c186d7714947 (draft) [tip ] C
986 @ 2:c186d7714947 (draft) [tip ] C
987 |
987 |
988 | o 1:44526ebb0f98 (draft) [ ] B
988 | o 1:44526ebb0f98 (draft) [ ] B
989 |/
989 |/
990 o 0:4b34ecfb0d56 (draft) [ ] A
990 o 0:4b34ecfb0d56 (draft) [ ] A
991
991
992
992
993 $ hg clone -r1 . ../other-bundleoverlay
993 $ hg clone -r1 . ../other-bundleoverlay
994 adding changesets
994 adding changesets
995 adding manifests
995 adding manifests
996 adding file changes
996 adding file changes
997 added 2 changesets with 2 changes to 1 files
997 added 2 changesets with 2 changes to 1 files
998 updating to branch default
998 updating to branch default
999 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
999 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1000 $ cd ../other-bundleoverlay
1000 $ cd ../other-bundleoverlay
1001 $ echo "B+" >> foo
1001 $ echo "B+" >> foo
1002 $ hg ci --amend -m "B+"
1002 $ hg ci --amend -m "B+"
1003 $ hg log -G --hidden
1003 $ hg log -G --hidden
1004 @ 3:b7d587542d40 (draft) [tip ] B+
1004 @ 3:b7d587542d40 (draft) [tip ] B+
1005 |
1005 |
1006 | x 2:eb95e9297e18 (draft *obsolete*) [ ] temporary amend commit for 44526ebb0f98
1006 | x 2:eb95e9297e18 (draft *obsolete*) [ ] temporary amend commit for 44526ebb0f98
1007 | |
1007 | |
1008 | x 1:44526ebb0f98 (draft *obsolete*) [ ] B
1008 | x 1:44526ebb0f98 (draft *obsolete*) [ ] B
1009 |/
1009 |/
1010 o 0:4b34ecfb0d56 (draft) [ ] A
1010 o 0:4b34ecfb0d56 (draft) [ ] A
1011
1011
1012
1012
1013 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
1013 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
1014 comparing with ../repo-bundleoverlay
1014 comparing with ../repo-bundleoverlay
1015 searching for changes
1015 searching for changes
1016 1:44526ebb0f98 (draft) [ ] B
1016 1:44526ebb0f98 (draft) [ ] B
1017 2:c186d7714947 (draft) [tip ] C
1017 2:c186d7714947 (draft) [tip ] C
1018 $ hg log -G -R ../bundleoverlay.hg
1018 $ hg log -G -R ../bundleoverlay.hg
1019 o 4:c186d7714947 (draft) [tip ] C
1019 o 4:c186d7714947 (draft) [tip ] C
1020 |
1020 |
1021 | @ 3:b7d587542d40 (draft) [ ] B+
1021 | @ 3:b7d587542d40 (draft) [ ] B+
1022 |/
1022 |/
1023 o 0:4b34ecfb0d56 (draft) [ ] A
1023 o 0:4b34ecfb0d56 (draft) [ ] A
1024
1024
1025
1025
1026 #if serve
1026 #if serve
1027
1027
1028 Test issue 4506
1028 Test issue 4506
1029
1029
1030 $ cd ..
1030 $ cd ..
1031 $ hg init repo-issue4506
1031 $ hg init repo-issue4506
1032 $ cd repo-issue4506
1032 $ cd repo-issue4506
1033 $ echo "0" > foo
1033 $ echo "0" > foo
1034 $ hg add foo
1034 $ hg add foo
1035 $ hg ci -m "content-0"
1035 $ hg ci -m "content-0"
1036
1036
1037 $ hg up null
1037 $ hg up null
1038 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1038 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1039 $ echo "1" > bar
1039 $ echo "1" > bar
1040 $ hg add bar
1040 $ hg add bar
1041 $ hg ci -m "content-1"
1041 $ hg ci -m "content-1"
1042 created new head
1042 created new head
1043 $ hg up 0
1043 $ hg up 0
1044 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1044 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1045 $ hg graft 1
1045 $ hg graft 1
1046 grafting 1:1c9eddb02162 "content-1" (tip)
1046 grafting 1:1c9eddb02162 "content-1" (tip)
1047
1047
1048 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
1048 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
1049
1049
1050 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1050 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1051 $ cat hg.pid >> $DAEMON_PIDS
1051 $ cat hg.pid >> $DAEMON_PIDS
1052
1052
1053 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
1053 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
1054 404 Not Found
1054 404 Not Found
1055 [1]
1055 [1]
1056 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
1056 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
1057 200 Script output follows
1057 200 Script output follows
1058 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
1058 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
1059 200 Script output follows
1059 200 Script output follows
1060
1060
1061 $ killdaemons.py
1061 $ killdaemons.py
1062
1062
1063 #endif
1063 #endif
1064
1064
1065 Test heads computation on pending index changes with obsolescence markers
1065 Test heads computation on pending index changes with obsolescence markers
1066 $ cd ..
1066 $ cd ..
1067 $ cat >$TESTTMP/test_extension.py << EOF
1067 $ cat >$TESTTMP/test_extension.py << EOF
1068 > from mercurial import cmdutil, registrar
1068 > from mercurial import cmdutil, registrar
1069 > from mercurial.i18n import _
1069 > from mercurial.i18n import _
1070 >
1070 >
1071 > cmdtable = {}
1071 > cmdtable = {}
1072 > command = registrar.command(cmdtable)
1072 > command = registrar.command(cmdtable)
1073 > @command("amendtransient",[], _('hg amendtransient [rev]'))
1073 > @command("amendtransient",[], _('hg amendtransient [rev]'))
1074 > def amend(ui, repo, *pats, **opts):
1074 > def amend(ui, repo, *pats, **opts):
1075 > def commitfunc(ui, repo, message, match, opts):
1075 > def commitfunc(ui, repo, message, match, opts):
1076 > return repo.commit(message, repo['.'].user(), repo['.'].date(), match)
1076 > return repo.commit(message, repo['.'].user(), repo['.'].date(), match)
1077 > opts['message'] = 'Test'
1077 > opts['message'] = 'Test'
1078 > opts['logfile'] = None
1078 > opts['logfile'] = None
1079 > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts)
1079 > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts)
1080 > ui.write('%s\n' % repo.changelog.headrevs())
1080 > ui.write('%s\n' % repo.changelog.headrevs())
1081 > EOF
1081 > EOF
1082 $ cat >> $HGRCPATH << EOF
1082 $ cat >> $HGRCPATH << EOF
1083 > [extensions]
1083 > [extensions]
1084 > testextension=$TESTTMP/test_extension.py
1084 > testextension=$TESTTMP/test_extension.py
1085 > EOF
1085 > EOF
1086 $ hg init repo-issue-nativerevs-pending-changes
1086 $ hg init repo-issue-nativerevs-pending-changes
1087 $ cd repo-issue-nativerevs-pending-changes
1087 $ cd repo-issue-nativerevs-pending-changes
1088 $ mkcommit a
1088 $ mkcommit a
1089 $ mkcommit b
1089 $ mkcommit b
1090 $ hg up ".^"
1090 $ hg up ".^"
1091 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1091 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1092 $ echo aa > a
1092 $ echo aa > a
1093 $ hg amendtransient
1093 $ hg amendtransient
1094 [1, 3]
1094 [1, 3]
1095
1095
1096 Check that corrupted hidden cache does not crash
1096 Check that corrupted hidden cache does not crash
1097
1097
1098 $ printf "" > .hg/cache/hidden
1098 $ printf "" > .hg/cache/hidden
1099 $ hg log -r . -T '{node}' --debug
1099 $ hg log -r . -T '{node}' --debug
1100 corrupted hidden cache
1100 corrupted hidden cache
1101 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
1101 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
1102 $ hg log -r . -T '{node}' --debug
1102 $ hg log -r . -T '{node}' --debug
1103 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
1103 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
1104
1104
1105 #if unix-permissions
1105 #if unix-permissions
1106 Check that wrong hidden cache permission does not crash
1106 Check that wrong hidden cache permission does not crash
1107
1107
1108 $ chmod 000 .hg/cache/hidden
1108 $ chmod 000 .hg/cache/hidden
1109 $ hg log -r . -T '{node}' --debug
1109 $ hg log -r . -T '{node}' --debug
1110 cannot read hidden cache
1110 cannot read hidden cache
1111 error writing hidden changesets cache
1111 error writing hidden changesets cache
1112 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
1112 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol)
1113 #endif
1113 #endif
1114
1114
1115 Test cache consistency for the visible filter
1115 Test cache consistency for the visible filter
1116 1) We want to make sure that the cached filtered revs are invalidated when
1116 1) We want to make sure that the cached filtered revs are invalidated when
1117 bookmarks change
1117 bookmarks change
1118 $ cd ..
1118 $ cd ..
1119 $ cat >$TESTTMP/test_extension.py << EOF
1119 $ cat >$TESTTMP/test_extension.py << EOF
1120 > import weakref
1120 > import weakref
1121 > from mercurial import cmdutil, extensions, bookmarks, repoview
1121 > from mercurial import cmdutil, extensions, bookmarks, repoview
1122 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
1122 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
1123 > reporef = weakref.ref(bkmstoreinst._repo)
1123 > reporef = weakref.ref(bkmstoreinst._repo)
1124 > def trhook(tr):
1124 > def trhook(tr):
1125 > repo = reporef()
1125 > repo = reporef()
1126 > hidden1 = repoview.computehidden(repo)
1126 > hidden1 = repoview.computehidden(repo)
1127 > hidden = repoview.filterrevs(repo, 'visible')
1127 > hidden = repoview.filterrevs(repo, 'visible')
1128 > if sorted(hidden1) != sorted(hidden):
1128 > if sorted(hidden1) != sorted(hidden):
1129 > print "cache inconsistency"
1129 > print "cache inconsistency"
1130 > bkmstoreinst._repo.currenttransaction().addpostclose('test_extension', trhook)
1130 > bkmstoreinst._repo.currenttransaction().addpostclose('test_extension', trhook)
1131 > orig(bkmstoreinst, *args, **kwargs)
1131 > orig(bkmstoreinst, *args, **kwargs)
1132 > def extsetup(ui):
1132 > def extsetup(ui):
1133 > extensions.wrapfunction(bookmarks.bmstore, 'recordchange',
1133 > extensions.wrapfunction(bookmarks.bmstore, 'recordchange',
1134 > _bookmarkchanged)
1134 > _bookmarkchanged)
1135 > EOF
1135 > EOF
1136
1136
1137 $ hg init repo-cache-inconsistency
1137 $ hg init repo-cache-inconsistency
1138 $ cd repo-issue-nativerevs-pending-changes
1138 $ cd repo-issue-nativerevs-pending-changes
1139 $ mkcommit a
1139 $ mkcommit a
1140 a already tracked!
1140 a already tracked!
1141 $ mkcommit b
1141 $ mkcommit b
1142 $ hg id
1142 $ hg id
1143 13bedc178fce tip
1143 13bedc178fce tip
1144 $ echo "hello" > b
1144 $ echo "hello" > b
1145 $ hg commit --amend -m "message"
1145 $ hg commit --amend -m "message"
1146 $ hg book bookb -r 13bedc178fce --hidden
1146 $ hg book bookb -r 13bedc178fce --hidden
1147 $ hg log -r 13bedc178fce
1147 $ hg log -r 13bedc178fce
1148 5:13bedc178fce (draft *obsolete*) [ bookb] add b
1148 5:13bedc178fce (draft *obsolete*) [ bookb] add b
1149 $ hg book -d bookb
1149 $ hg book -d bookb
1150 $ hg log -r 13bedc178fce
1150 $ hg log -r 13bedc178fce
1151 abort: hidden revision '13bedc178fce'!
1151 abort: hidden revision '13bedc178fce'!
1152 (use --hidden to access hidden revisions)
1152 (use --hidden to access hidden revisions)
1153 [255]
1153 [255]
1154
1154
1155 Empty out the test extension, as it isn't compatible with later parts
1155 Empty out the test extension, as it isn't compatible with later parts
1156 of the test.
1156 of the test.
1157 $ echo > $TESTTMP/test_extension.py
1157 $ echo > $TESTTMP/test_extension.py
1158
1158
1159 Test ability to pull changeset with locally applying obsolescence markers
1159 Test ability to pull changeset with locally applying obsolescence markers
1160 (issue4945)
1160 (issue4945)
1161
1161
1162 $ cd ..
1162 $ cd ..
1163 $ hg init issue4845
1163 $ hg init issue4845
1164 $ cd issue4845
1164 $ cd issue4845
1165
1165
1166 $ echo foo > f0
1166 $ echo foo > f0
1167 $ hg add f0
1167 $ hg add f0
1168 $ hg ci -m '0'
1168 $ hg ci -m '0'
1169 $ echo foo > f1
1169 $ echo foo > f1
1170 $ hg add f1
1170 $ hg add f1
1171 $ hg ci -m '1'
1171 $ hg ci -m '1'
1172 $ echo foo > f2
1172 $ echo foo > f2
1173 $ hg add f2
1173 $ hg add f2
1174 $ hg ci -m '2'
1174 $ hg ci -m '2'
1175
1175
1176 $ echo bar > f2
1176 $ echo bar > f2
1177 $ hg commit --amend --config experimetnal.evolution=createmarkers
1177 $ hg commit --amend --config experimetnal.evolution=createmarkers
1178 $ hg log -G
1178 $ hg log -G
1179 @ 4:b0551702f918 (draft) [tip ] 2
1179 @ 4:b0551702f918 (draft) [tip ] 2
1180 |
1180 |
1181 o 1:e016b03fd86f (draft) [ ] 1
1181 o 1:e016b03fd86f (draft) [ ] 1
1182 |
1182 |
1183 o 0:a78f55e5508c (draft) [ ] 0
1183 o 0:a78f55e5508c (draft) [ ] 0
1184
1184
1185 $ hg log -G --hidden
1185 $ hg log -G --hidden
1186 @ 4:b0551702f918 (draft) [tip ] 2
1186 @ 4:b0551702f918 (draft) [tip ] 2
1187 |
1187 |
1188 | x 3:f27abbcc1f77 (draft *obsolete*) [ ] temporary amend commit for e008cf283490
1188 | x 3:f27abbcc1f77 (draft *obsolete*) [ ] temporary amend commit for e008cf283490
1189 | |
1189 | |
1190 | x 2:e008cf283490 (draft *obsolete*) [ ] 2
1190 | x 2:e008cf283490 (draft *obsolete*) [ ] 2
1191 |/
1191 |/
1192 o 1:e016b03fd86f (draft) [ ] 1
1192 o 1:e016b03fd86f (draft) [ ] 1
1193 |
1193 |
1194 o 0:a78f55e5508c (draft) [ ] 0
1194 o 0:a78f55e5508c (draft) [ ] 0
1195
1195
1196
1196
1197 $ hg strip -r 1 --config extensions.strip=
1197 $ hg strip -r 1 --config extensions.strip=
1198 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1198 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
1199 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg (glob)
1199 saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg (glob)
1200 $ hg log -G
1200 $ hg log -G
1201 @ 0:a78f55e5508c (draft) [tip ] 0
1201 @ 0:a78f55e5508c (draft) [tip ] 0
1202
1202
1203 $ hg log -G --hidden
1203 $ hg log -G --hidden
1204 @ 0:a78f55e5508c (draft) [tip ] 0
1204 @ 0:a78f55e5508c (draft) [tip ] 0
1205
1205
1206
1206
1207 $ hg pull .hg/strip-backup/*
1207 $ hg pull .hg/strip-backup/*
1208 pulling from .hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg
1208 pulling from .hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg
1209 searching for changes
1209 searching for changes
1210 adding changesets
1210 adding changesets
1211 adding manifests
1211 adding manifests
1212 adding file changes
1212 adding file changes
1213 added 2 changesets with 2 changes to 2 files
1213 added 2 changesets with 2 changes to 2 files
1214 (run 'hg update' to get a working copy)
1214 (run 'hg update' to get a working copy)
1215 $ hg log -G
1215 $ hg log -G
1216 o 2:b0551702f918 (draft) [tip ] 2
1216 o 2:b0551702f918 (draft) [tip ] 2
1217 |
1217 |
1218 o 1:e016b03fd86f (draft) [ ] 1
1218 o 1:e016b03fd86f (draft) [ ] 1
1219 |
1219 |
1220 @ 0:a78f55e5508c (draft) [ ] 0
1220 @ 0:a78f55e5508c (draft) [ ] 0
1221
1221
1222 $ hg log -G --hidden
1222 $ hg log -G --hidden
1223 o 2:b0551702f918 (draft) [tip ] 2
1223 o 2:b0551702f918 (draft) [tip ] 2
1224 |
1224 |
1225 o 1:e016b03fd86f (draft) [ ] 1
1225 o 1:e016b03fd86f (draft) [ ] 1
1226 |
1226 |
1227 @ 0:a78f55e5508c (draft) [ ] 0
1227 @ 0:a78f55e5508c (draft) [ ] 0
1228
1228
1229 Test that 'hg debugobsolete --index --rev' can show indices of obsmarkers when
1229 Test that 'hg debugobsolete --index --rev' can show indices of obsmarkers when
1230 only a subset of those are displayed (because of --rev option)
1230 only a subset of those are displayed (because of --rev option)
1231 $ hg init doindexrev
1231 $ hg init doindexrev
1232 $ cd doindexrev
1232 $ cd doindexrev
1233 $ echo a > a
1233 $ echo a > a
1234 $ hg ci -Am a
1234 $ hg ci -Am a
1235 adding a
1235 adding a
1236 $ hg ci --amend -m aa
1236 $ hg ci --amend -m aa
1237 $ echo b > b
1237 $ echo b > b
1238 $ hg ci -Am b
1238 $ hg ci -Am b
1239 adding b
1239 adding b
1240 $ hg ci --amend -m bb
1240 $ hg ci --amend -m bb
1241 $ echo c > c
1241 $ echo c > c
1242 $ hg ci -Am c
1242 $ hg ci -Am c
1243 adding c
1243 adding c
1244 $ hg ci --amend -m cc
1244 $ hg ci --amend -m cc
1245 $ echo d > d
1245 $ echo d > d
1246 $ hg ci -Am d
1246 $ hg ci -Am d
1247 adding d
1247 adding d
1248 $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
1248 $ hg ci --amend -m dd --config experimental.evolution.track-operation=1
1249 $ hg debugobsolete --index --rev "3+7"
1249 $ hg debugobsolete --index --rev "3+7"
1250 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
1250 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
1251 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'operation': 'amend', 'user': 'test'} (re)
1251 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'operation': 'amend', 'user': 'test'} (re)
1252 $ hg debugobsolete --index --rev "3+7" -Tjson
1252 $ hg debugobsolete --index --rev "3+7" -Tjson
1253 [
1253 [
1254 {
1254 {
1255 "date": *, (glob)
1255 "date": [0.0, 0],
1256 "flag": 0,
1256 "flag": 0,
1257 "index": 1,
1257 "index": 1,
1258 "metadata": {"user": "test"},
1258 "metadata": {"user": "test"},
1259 "precnode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
1259 "precnode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
1260 "succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
1260 "succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
1261 },
1261 },
1262 {
1262 {
1263 "date": *, (glob)
1263 "date": [0.0, 0],
1264 "flag": 0,
1264 "flag": 0,
1265 "index": 3,
1265 "index": 3,
1266 "metadata": {"operation": "amend", "user": "test"},
1266 "metadata": {"operation": "amend", "user": "test"},
1267 "precnode": "4715cf767440ed891755448016c2b8cf70760c30",
1267 "precnode": "4715cf767440ed891755448016c2b8cf70760c30",
1268 "succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
1268 "succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
1269 }
1269 }
1270 ]
1270 ]
1271
1271
1272 Test the --delete option of debugobsolete command
1272 Test the --delete option of debugobsolete command
1273 $ hg debugobsolete --index
1273 $ hg debugobsolete --index
1274 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 \(.*\) {'user': 'test'} (re)
1274 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1275 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
1275 1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1276 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 \(.*\) {'user': 'test'} (re)
1276 2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1277 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (*) {'operation': 'amend', 'user': 'test'} (glob)
1277 3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
1278 $ hg debugobsolete --delete 1 --delete 3
1278 $ hg debugobsolete --delete 1 --delete 3
1279 deleted 2 obsolescence markers
1279 deleted 2 obsolescence markers
1280 $ hg debugobsolete
1280 $ hg debugobsolete
1281 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 \(.*\) {'user': 'test'} (re)
1281 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1282 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 \(.*\) {'user': 'test'} (re)
1282 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1283
1283
1284 Test adding changeset after obsmarkers affecting it
1284 Test adding changeset after obsmarkers affecting it
1285 (eg: during pull, or unbundle)
1285 (eg: during pull, or unbundle)
1286
1286
1287 $ mkcommit e
1287 $ mkcommit e
1288 $ hg bundle -r . --base .~1 ../bundle-2.hg
1288 $ hg bundle -r . --base .~1 ../bundle-2.hg
1289 1 changesets found
1289 1 changesets found
1290 $ getid .
1290 $ getid .
1291 $ hg --config extensions.strip= strip -r .
1291 $ hg --config extensions.strip= strip -r .
1292 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1292 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1293 saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg (glob)
1293 saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg (glob)
1294 $ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
1294 $ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
1295 $ hg unbundle ../bundle-2.hg
1295 $ hg unbundle ../bundle-2.hg
1296 adding changesets
1296 adding changesets
1297 adding manifests
1297 adding manifests
1298 adding file changes
1298 adding file changes
1299 added 1 changesets with 1 changes to 1 files
1299 added 1 changesets with 1 changes to 1 files
1300 (run 'hg update' to get a working copy)
1300 (run 'hg update' to get a working copy)
1301 $ hg log -G
1301 $ hg log -G
1302 @ 7:7ae79c5d60f0 (draft) [tip ] dd
1302 @ 7:7ae79c5d60f0 (draft) [tip ] dd
1303 |
1303 |
1304 | o 6:4715cf767440 (draft) [ ] d
1304 | o 6:4715cf767440 (draft) [ ] d
1305 |/
1305 |/
1306 o 5:29346082e4a9 (draft) [ ] cc
1306 o 5:29346082e4a9 (draft) [ ] cc
1307 |
1307 |
1308 o 3:d27fb9b06607 (draft) [ ] bb
1308 o 3:d27fb9b06607 (draft) [ ] bb
1309 |
1309 |
1310 | o 2:6fdef60fcbab (draft) [ ] b
1310 | o 2:6fdef60fcbab (draft) [ ] b
1311 |/
1311 |/
1312 o 1:f9bd49731b0b (draft) [ ] aa
1312 o 1:f9bd49731b0b (draft) [ ] aa
1313
1313
1314
1314
1315 $ cd ..
1315 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now