obsolete: add a function to compute "exclusive-markers" for a set of nodes...
marmoute - r32626:00a7f7b1 default
@@ -1,2185 +1,2188 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import difflib
import errno
import operator
import os
import random
import socket
import string
import sys
import tempfile
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    localrepo,
    lock as lockmod,
    merge as mergemod,
    obsolete,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    smartset,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    util,
    vfs as vfsmod,
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

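# Illustrative usage (editor's note, not part of debugcommands.py): inside a
# repository, `hg debugancestor REV1 REV2` prints the common ancestor as
# "rev:hexnode"; the values below are placeholders.
#
#   $ hg debugancestor 3 5
#   1:<40-character ancestor node hash>
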
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

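# Illustrative usage (editor's note, not part of debugcommands.py): the DAG
# text below is an example of the grammar documented in the docstring above.
#
#   $ hg init dagdemo && cd dagdemo
#   $ hg debugbuilddag '+3 :base *2 +2 /base @stable +1'
#
# '+3' creates three linear nodes, ':base' tags the last one, '*2' forks from
# the node two back, '/base' merges the preceding node with the tagged node,
# and '@stable' puts the final '+1' node on the named branch "stable".
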
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node

def _debugobsmarkers(ui, data, all=None, indent=0, **opts):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %s (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsolete.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            _debugobsmarkers(ui, part.read(), all=all, indent=4, **opts)

@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

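# Illustrative usage (editor's note, not part of debugcommands.py):
#
#   $ hg bundle --all everything.hg
#   $ hg debugbundle everything.hg          # list changeset nodes per part
#   $ hg debugbundle --all everything.hg    # also dump per-chunk delta info
#   $ hg debugbundle --spec everything.hg   # print only the bundlespec
#
# Bundle2 files are dispatched to _debugbundle2() above; plain changegroup
# bundles go through _debugchangegroup().
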
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

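# Illustrative usage (editor's note, not part of debugcommands.py): the
# command echoes the parsed (unixtime, tz offset) pair followed by the
# formatted date; the exact values below are an example, not verified output.
#
#   $ hg debugdate '2017-05-01 12:00 +0200'
#   internal: 1493632800 -7200
#   standard: Mon May 01 12:00:00 2017 +0200
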
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()

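# Illustrative usage (editor's note, not part of debugcommands.py): the
# template keywords documented above can be used via -T from formatteropts,
# and -c/-m come from debugrevlogopts.
#
#   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainsize}\n'
#   $ hg debugdeltachain -c -T json
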
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)

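# Illustrative usage (editor's note, not part of debugcommands.py):
#
#   $ hg debugdiscovery                    # compare with the 'default' path
#   $ hg debugdiscovery --old some-remote  # force the old tree-walking protocol
#
# The output lists the common heads found and reports whether the local or
# remote head set is a subset of them.
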
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

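# Illustrative usage (editor's note, not part of debugcommands.py):
#
#   $ hg debugextensions            # names, flagged if untested with this hg
#   $ hg debugextensions -v         # adds location, bundled and tested-with info
#   $ hg debugextensions -T json    # machine-readable output via the formatter
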
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

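# Illustrative usage (editor's note, not part of debugcommands.py): the
# fileset expression below is an example, not taken from the original file.
#
#   $ hg debugfileset -r tip 'size(">2k") and not binary()'
#
# With -v the parsed tree is printed before the matching file names.
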
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

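# Illustrative usage (editor's note, not part of debugcommands.py): node IDs
# must be full 40-character hashes; the one below is a placeholder.
#
#   $ hg debuggetbundle http://example.com/repo out.hg \
#         -H 0123456789012345678901234567890123456789 -t bundle2
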
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)

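# Illustrative usage (editor's note, not part of debugcommands.py): the file
# name and .hgignore rule shown are invented for the example.
#
#   $ hg debugignore               # dump the combined ignore matcher
#   $ hg debugignore build/out.o
#   build/out.o is ignored
#   (ignore rule in .hgignore, line 2: 'build/')
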
865 @command('debugindex', cmdutil.debugrevlogopts +
865 @command('debugindex', cmdutil.debugrevlogopts +
866 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
866 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
867 _('[-f FORMAT] -c|-m|FILE'),
867 _('[-f FORMAT] -c|-m|FILE'),
868 optionalrepo=True)
868 optionalrepo=True)
869 def debugindex(ui, repo, file_=None, **opts):
869 def debugindex(ui, repo, file_=None, **opts):
870 """dump the contents of an index file"""
870 """dump the contents of an index file"""
871 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
871 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
872 format = opts.get('format', 0)
872 format = opts.get('format', 0)
873 if format not in (0, 1):
873 if format not in (0, 1):
874 raise error.Abort(_("unknown format %d") % format)
874 raise error.Abort(_("unknown format %d") % format)
875
875
876 generaldelta = r.version & revlog.FLAG_GENERALDELTA
876 generaldelta = r.version & revlog.FLAG_GENERALDELTA
877 if generaldelta:
877 if generaldelta:
878 basehdr = ' delta'
878 basehdr = ' delta'
879 else:
879 else:
880 basehdr = ' base'
880 basehdr = ' base'
881
881
882 if ui.debugflag:
882 if ui.debugflag:
883 shortfn = hex
883 shortfn = hex
884 else:
884 else:
885 shortfn = short
885 shortfn = short
886
886
887 # There might not be anything in r, so have a sane default
887 # There might not be anything in r, so have a sane default
888 idlen = 12
888 idlen = 12
889 for i in r:
889 for i in r:
890 idlen = len(shortfn(r.node(i)))
890 idlen = len(shortfn(r.node(i)))
891 break
891 break
892
892
893 if format == 0:
893 if format == 0:
894 ui.write((" rev offset length " + basehdr + " linkrev"
894 ui.write((" rev offset length " + basehdr + " linkrev"
895 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
895 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
896 elif format == 1:
896 elif format == 1:
897 ui.write((" rev flag offset length"
897 ui.write((" rev flag offset length"
898 " size " + basehdr + " link p1 p2"
898 " size " + basehdr + " link p1 p2"
899 " %s\n") % "nodeid".rjust(idlen))
899 " %s\n") % "nodeid".rjust(idlen))
900
900
901 for i in r:
901 for i in r:
902 node = r.node(i)
902 node = r.node(i)
903 if generaldelta:
903 if generaldelta:
904 base = r.deltaparent(i)
904 base = r.deltaparent(i)
905 else:
905 else:
906 base = r.chainbase(i)
906 base = r.chainbase(i)
907 if format == 0:
907 if format == 0:
908 try:
908 try:
909 pp = r.parents(node)
909 pp = r.parents(node)
910 except Exception:
910 except Exception:
911 pp = [nullid, nullid]
911 pp = [nullid, nullid]
912 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
912 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
913 i, r.start(i), r.length(i), base, r.linkrev(i),
913 i, r.start(i), r.length(i), base, r.linkrev(i),
914 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
914 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
915 elif format == 1:
915 elif format == 1:
916 pr = r.parentrevs(i)
916 pr = r.parentrevs(i)
917 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
917 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
918 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
918 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
919 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
919 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
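# Illustration (not Mercurial code): a sketch of what the "delta"/"base"
# column above reports. With generaldelta the base is simply each revision's
# delta parent; without it, it is the start of the delta chain, modelled
# here by following delta parents until a full snapshot (a delta parent of
# -1, as in the --dump loop of debugrevlog further down).
def _chainbase_sketch(rev, deltaparents):
    # deltaparents[r] is r's delta parent revision, or -1 for a snapshot
    base = rev
    while deltaparents[base] != -1:
        base = deltaparents[base]
    return base

# Example: with deltaparents = [-1, 0, 1, -1, 3],
# _chainbase_sketch(2, deltaparents) == 0 and _chainbase_sketch(4, deltaparents) == 3.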
920
920
921 @command('debugindexdot', cmdutil.debugrevlogopts,
921 @command('debugindexdot', cmdutil.debugrevlogopts,
922 _('-c|-m|FILE'), optionalrepo=True)
922 _('-c|-m|FILE'), optionalrepo=True)
923 def debugindexdot(ui, repo, file_=None, **opts):
923 def debugindexdot(ui, repo, file_=None, **opts):
924 """dump an index DAG as a graphviz dot file"""
924 """dump an index DAG as a graphviz dot file"""
925 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
925 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
926 ui.write(("digraph G {\n"))
926 ui.write(("digraph G {\n"))
927 for i in r:
927 for i in r:
928 node = r.node(i)
928 node = r.node(i)
929 pp = r.parents(node)
929 pp = r.parents(node)
930 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
930 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
931 if pp[1] != nullid:
931 if pp[1] != nullid:
932 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
932 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
933 ui.write("}\n")
933 ui.write("}\n")
934
934
935 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
935 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
936 def debuginstall(ui, **opts):
936 def debuginstall(ui, **opts):
937 '''test Mercurial installation
937 '''test Mercurial installation
938
938
939 Returns 0 on success.
939 Returns 0 on success.
940 '''
940 '''
941
941
942 def writetemp(contents):
942 def writetemp(contents):
943 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
943 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
944 f = os.fdopen(fd, pycompat.sysstr("wb"))
944 f = os.fdopen(fd, pycompat.sysstr("wb"))
945 f.write(contents)
945 f.write(contents)
946 f.close()
946 f.close()
947 return name
947 return name
948
948
949 problems = 0
949 problems = 0
950
950
951 fm = ui.formatter('debuginstall', opts)
951 fm = ui.formatter('debuginstall', opts)
952 fm.startitem()
952 fm.startitem()
953
953
954 # encoding
954 # encoding
955 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
955 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
956 err = None
956 err = None
957 try:
957 try:
958 encoding.fromlocal("test")
958 encoding.fromlocal("test")
959 except error.Abort as inst:
959 except error.Abort as inst:
960 err = inst
960 err = inst
961 problems += 1
961 problems += 1
962 fm.condwrite(err, 'encodingerror', _(" %s\n"
962 fm.condwrite(err, 'encodingerror', _(" %s\n"
963 " (check that your locale is properly set)\n"), err)
963 " (check that your locale is properly set)\n"), err)
964
964
965 # Python
965 # Python
966 fm.write('pythonexe', _("checking Python executable (%s)\n"),
966 fm.write('pythonexe', _("checking Python executable (%s)\n"),
967 pycompat.sysexecutable)
967 pycompat.sysexecutable)
968 fm.write('pythonver', _("checking Python version (%s)\n"),
968 fm.write('pythonver', _("checking Python version (%s)\n"),
969 ("%d.%d.%d" % sys.version_info[:3]))
969 ("%d.%d.%d" % sys.version_info[:3]))
970 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
970 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
971 os.path.dirname(pycompat.fsencode(os.__file__)))
971 os.path.dirname(pycompat.fsencode(os.__file__)))
972
972
973 security = set(sslutil.supportedprotocols)
973 security = set(sslutil.supportedprotocols)
974 if sslutil.hassni:
974 if sslutil.hassni:
975 security.add('sni')
975 security.add('sni')
976
976
977 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
977 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
978 fm.formatlist(sorted(security), name='protocol',
978 fm.formatlist(sorted(security), name='protocol',
979 fmt='%s', sep=','))
979 fmt='%s', sep=','))
980
980
981 # These are warnings, not errors. So don't increment problem count. This
981 # These are warnings, not errors. So don't increment problem count. This
982 # may change in the future.
982 # may change in the future.
983 if 'tls1.2' not in security:
983 if 'tls1.2' not in security:
984 fm.plain(_(' TLS 1.2 not supported by Python install; '
984 fm.plain(_(' TLS 1.2 not supported by Python install; '
985 'network connections lack modern security\n'))
985 'network connections lack modern security\n'))
986 if 'sni' not in security:
986 if 'sni' not in security:
987 fm.plain(_(' SNI not supported by Python install; may have '
987 fm.plain(_(' SNI not supported by Python install; may have '
988 'connectivity issues with some servers\n'))
988 'connectivity issues with some servers\n'))
989
989
990 # TODO print CA cert info
990 # TODO print CA cert info
991
991
992 # hg version
992 # hg version
993 hgver = util.version()
993 hgver = util.version()
994 fm.write('hgver', _("checking Mercurial version (%s)\n"),
994 fm.write('hgver', _("checking Mercurial version (%s)\n"),
995 hgver.split('+')[0])
995 hgver.split('+')[0])
996 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
996 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
997 '+'.join(hgver.split('+')[1:]))
997 '+'.join(hgver.split('+')[1:]))
998
998
999 # compiled modules
999 # compiled modules
1000 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1000 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1001 policy.policy)
1001 policy.policy)
1002 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1002 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1003 os.path.dirname(pycompat.fsencode(__file__)))
1003 os.path.dirname(pycompat.fsencode(__file__)))
1004
1004
1005 if policy.policy in ('c', 'allow'):
1005 if policy.policy in ('c', 'allow'):
1006 err = None
1006 err = None
1007 try:
1007 try:
1008 from .cext import (
1008 from .cext import (
1009 base85,
1009 base85,
1010 bdiff,
1010 bdiff,
1011 mpatch,
1011 mpatch,
1012 osutil,
1012 osutil,
1013 )
1013 )
1014 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1014 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1015 except Exception as inst:
1015 except Exception as inst:
1016 err = inst
1016 err = inst
1017 problems += 1
1017 problems += 1
1018 fm.condwrite(err, 'extensionserror', " %s\n", err)
1018 fm.condwrite(err, 'extensionserror', " %s\n", err)
1019
1019
1020 compengines = util.compengines._engines.values()
1020 compengines = util.compengines._engines.values()
1021 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1021 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1022 fm.formatlist(sorted(e.name() for e in compengines),
1022 fm.formatlist(sorted(e.name() for e in compengines),
1023 name='compengine', fmt='%s', sep=', '))
1023 name='compengine', fmt='%s', sep=', '))
1024 fm.write('compenginesavail', _('checking available compression engines '
1024 fm.write('compenginesavail', _('checking available compression engines '
1025 '(%s)\n'),
1025 '(%s)\n'),
1026 fm.formatlist(sorted(e.name() for e in compengines
1026 fm.formatlist(sorted(e.name() for e in compengines
1027 if e.available()),
1027 if e.available()),
1028 name='compengine', fmt='%s', sep=', '))
1028 name='compengine', fmt='%s', sep=', '))
1029 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1029 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1030 fm.write('compenginesserver', _('checking available compression engines '
1030 fm.write('compenginesserver', _('checking available compression engines '
1031 'for wire protocol (%s)\n'),
1031 'for wire protocol (%s)\n'),
1032 fm.formatlist([e.name() for e in wirecompengines
1032 fm.formatlist([e.name() for e in wirecompengines
1033 if e.wireprotosupport()],
1033 if e.wireprotosupport()],
1034 name='compengine', fmt='%s', sep=', '))
1034 name='compengine', fmt='%s', sep=', '))
1035
1035
1036 # templates
1036 # templates
1037 p = templater.templatepaths()
1037 p = templater.templatepaths()
1038 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1038 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1039 fm.condwrite(not p, '', _(" no template directories found\n"))
1039 fm.condwrite(not p, '', _(" no template directories found\n"))
1040 if p:
1040 if p:
1041 m = templater.templatepath("map-cmdline.default")
1041 m = templater.templatepath("map-cmdline.default")
1042 if m:
1042 if m:
1043 # template found, check if it is working
1043 # template found, check if it is working
1044 err = None
1044 err = None
1045 try:
1045 try:
1046 templater.templater.frommapfile(m)
1046 templater.templater.frommapfile(m)
1047 except Exception as inst:
1047 except Exception as inst:
1048 err = inst
1048 err = inst
1049 p = None
1049 p = None
1050 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1050 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1051 else:
1051 else:
1052 p = None
1052 p = None
1053 fm.condwrite(p, 'defaulttemplate',
1053 fm.condwrite(p, 'defaulttemplate',
1054 _("checking default template (%s)\n"), m)
1054 _("checking default template (%s)\n"), m)
1055 fm.condwrite(not m, 'defaulttemplatenotfound',
1055 fm.condwrite(not m, 'defaulttemplatenotfound',
1056 _(" template '%s' not found\n"), "default")
1056 _(" template '%s' not found\n"), "default")
1057 if not p:
1057 if not p:
1058 problems += 1
1058 problems += 1
1059 fm.condwrite(not p, '',
1059 fm.condwrite(not p, '',
1060 _(" (templates seem to have been installed incorrectly)\n"))
1060 _(" (templates seem to have been installed incorrectly)\n"))
1061
1061
1062 # editor
1062 # editor
1063 editor = ui.geteditor()
1063 editor = ui.geteditor()
1064 editor = util.expandpath(editor)
1064 editor = util.expandpath(editor)
1065 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1065 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1066 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1066 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1067 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1067 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1068 _(" No commit editor set and can't find %s in PATH\n"
1068 _(" No commit editor set and can't find %s in PATH\n"
1069 " (specify a commit editor in your configuration"
1069 " (specify a commit editor in your configuration"
1070 " file)\n"), not cmdpath and editor == 'vi' and editor)
1070 " file)\n"), not cmdpath and editor == 'vi' and editor)
1071 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1071 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1072 _(" Can't find editor '%s' in PATH\n"
1072 _(" Can't find editor '%s' in PATH\n"
1073 " (specify a commit editor in your configuration"
1073 " (specify a commit editor in your configuration"
1074 " file)\n"), not cmdpath and editor)
1074 " file)\n"), not cmdpath and editor)
1075 if not cmdpath and editor != 'vi':
1075 if not cmdpath and editor != 'vi':
1076 problems += 1
1076 problems += 1
1077
1077
1078 # check username
1078 # check username
1079 username = None
1079 username = None
1080 err = None
1080 err = None
1081 try:
1081 try:
1082 username = ui.username()
1082 username = ui.username()
1083 except error.Abort as e:
1083 except error.Abort as e:
1084 err = e
1084 err = e
1085 problems += 1
1085 problems += 1
1086
1086
1087 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1087 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1088 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1088 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1089 " (specify a username in your configuration file)\n"), err)
1089 " (specify a username in your configuration file)\n"), err)
1090
1090
1091 fm.condwrite(not problems, '',
1091 fm.condwrite(not problems, '',
1092 _("no problems detected\n"))
1092 _("no problems detected\n"))
1093 if not problems:
1093 if not problems:
1094 fm.data(problems=problems)
1094 fm.data(problems=problems)
1095 fm.condwrite(problems, 'problems',
1095 fm.condwrite(problems, 'problems',
1096 _("%d problems detected,"
1096 _("%d problems detected,"
1097 " please check your install!\n"), problems)
1097 " please check your install!\n"), problems)
1098 fm.end()
1098 fm.end()
1099
1099
1100 return problems
1100 return problems
1101
1101
1102 @command('debugknown', [], _('REPO ID...'), norepo=True)
1102 @command('debugknown', [], _('REPO ID...'), norepo=True)
1103 def debugknown(ui, repopath, *ids, **opts):
1103 def debugknown(ui, repopath, *ids, **opts):
1104 """test whether node ids are known to a repo
1104 """test whether node ids are known to a repo
1105
1105
1106 Every ID must be a full-length hex node id string. Returns a list of 0s
1106 Every ID must be a full-length hex node id string. Returns a list of 0s
1107 and 1s indicating unknown/known.
1107 and 1s indicating unknown/known.
1108 """
1108 """
1109 repo = hg.peer(ui, opts, repopath)
1109 repo = hg.peer(ui, opts, repopath)
1110 if not repo.capable('known'):
1110 if not repo.capable('known'):
1111 raise error.Abort("known() not supported by target repository")
1111 raise error.Abort("known() not supported by target repository")
1112 flags = repo.known([bin(s) for s in ids])
1112 flags = repo.known([bin(s) for s in ids])
1113 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1113 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1114
1114
1115 @command('debuglabelcomplete', [], _('LABEL...'))
1115 @command('debuglabelcomplete', [], _('LABEL...'))
1116 def debuglabelcomplete(ui, repo, *args):
1116 def debuglabelcomplete(ui, repo, *args):
1117 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1117 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1118 debugnamecomplete(ui, repo, *args)
1118 debugnamecomplete(ui, repo, *args)
1119
1119
1120 @command('debuglocks',
1120 @command('debuglocks',
1121 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1121 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1122 ('W', 'force-wlock', None,
1122 ('W', 'force-wlock', None,
1123 _('free the working state lock (DANGEROUS)'))],
1123 _('free the working state lock (DANGEROUS)'))],
1124 _('[OPTION]...'))
1124 _('[OPTION]...'))
1125 def debuglocks(ui, repo, **opts):
1125 def debuglocks(ui, repo, **opts):
1126 """show or modify state of locks
1126 """show or modify state of locks
1127
1127
1128 By default, this command will show which locks are held. This
1128 By default, this command will show which locks are held. This
1129 includes the user and process holding the lock, the amount of time
1129 includes the user and process holding the lock, the amount of time
1130 the lock has been held, and the machine name where the process is
1130 the lock has been held, and the machine name where the process is
1131 running if it's not local.
1131 running if it's not local.
1132
1132
1133 Locks protect the integrity of Mercurial's data, so they should be
1133 Locks protect the integrity of Mercurial's data, so they should be
1134 treated with care. System crashes or other interruptions may cause
1134 treated with care. System crashes or other interruptions may cause
1135 locks to not be properly released, though Mercurial will usually
1135 locks to not be properly released, though Mercurial will usually
1136 detect and remove such stale locks automatically.
1136 detect and remove such stale locks automatically.
1137
1137
1138 However, detecting stale locks may not always be possible (for
1138 However, detecting stale locks may not always be possible (for
1139 instance, on a shared filesystem). Removing locks may also be
1139 instance, on a shared filesystem). Removing locks may also be
1140 blocked by filesystem permissions.
1140 blocked by filesystem permissions.
1141
1141
1142 Returns 0 if no locks are held.
1142 Returns 0 if no locks are held.
1143
1143
1144 """
1144 """
1145
1145
1146 if opts.get('force_lock'):
1146 if opts.get('force_lock'):
1147 repo.svfs.unlink('lock')
1147 repo.svfs.unlink('lock')
1148 if opts.get('force_wlock'):
1148 if opts.get('force_wlock'):
1149 repo.vfs.unlink('wlock')
1149 repo.vfs.unlink('wlock')
1150 if opts.get('force_lock') or opts.get('force_wlock'):
1150 if opts.get('force_lock') or opts.get('force_wlock'):
1151 return 0
1151 return 0
1152
1152
1153 now = time.time()
1153 now = time.time()
1154 held = 0
1154 held = 0
1155
1155
1156 def report(vfs, name, method):
1156 def report(vfs, name, method):
1157 # this causes stale locks to get reaped for more accurate reporting
1157 # this causes stale locks to get reaped for more accurate reporting
1158 try:
1158 try:
1159 l = method(False)
1159 l = method(False)
1160 except error.LockHeld:
1160 except error.LockHeld:
1161 l = None
1161 l = None
1162
1162
1163 if l:
1163 if l:
1164 l.release()
1164 l.release()
1165 else:
1165 else:
1166 try:
1166 try:
1167 stat = vfs.lstat(name)
1167 stat = vfs.lstat(name)
1168 age = now - stat.st_mtime
1168 age = now - stat.st_mtime
1169 user = util.username(stat.st_uid)
1169 user = util.username(stat.st_uid)
1170 locker = vfs.readlock(name)
1170 locker = vfs.readlock(name)
1171 if ":" in locker:
1171 if ":" in locker:
1172 host, pid = locker.split(':')
1172 host, pid = locker.split(':')
1173 if host == socket.gethostname():
1173 if host == socket.gethostname():
1174 locker = 'user %s, process %s' % (user, pid)
1174 locker = 'user %s, process %s' % (user, pid)
1175 else:
1175 else:
1176 locker = 'user %s, process %s, host %s' \
1176 locker = 'user %s, process %s, host %s' \
1177 % (user, pid, host)
1177 % (user, pid, host)
1178 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1178 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1179 return 1
1179 return 1
1180 except OSError as e:
1180 except OSError as e:
1181 if e.errno != errno.ENOENT:
1181 if e.errno != errno.ENOENT:
1182 raise
1182 raise
1183
1183
1184 ui.write(("%-6s free\n") % (name + ":"))
1184 ui.write(("%-6s free\n") % (name + ":"))
1185 return 0
1185 return 0
1186
1186
1187 held += report(repo.svfs, "lock", repo.lock)
1187 held += report(repo.svfs, "lock", repo.lock)
1188 held += report(repo.vfs, "wlock", repo.wlock)
1188 held += report(repo.vfs, "wlock", repo.wlock)
1189
1189
1190 return held
1190 return held
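# Illustration (not Mercurial code): a sketch of the locker-string formatting
# used in report() above, assuming the "host:pid" lock contents shown there.
# The host is only mentioned when it differs from the local machine.
def _describelocker_sketch(lockcontents, user):
    import socket
    host, pid = lockcontents.split(':', 1)
    if host == socket.gethostname():
        return 'user %s, process %s' % (user, pid)
    return 'user %s, process %s, host %s' % (user, pid, host)

# Example: _describelocker_sketch('otherbox:4242', 'alice') returns
# 'user alice, process 4242, host otherbox'.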
1191
1191
1192 @command('debugmergestate', [], '')
1192 @command('debugmergestate', [], '')
1193 def debugmergestate(ui, repo, *args):
1193 def debugmergestate(ui, repo, *args):
1194 """print merge state
1194 """print merge state
1195
1195
1196 Use --verbose to print out information about whether v1 or v2 merge state
1196 Use --verbose to print out information about whether v1 or v2 merge state
1197 was chosen."""
1197 was chosen."""
1198 def _hashornull(h):
1198 def _hashornull(h):
1199 if h == nullhex:
1199 if h == nullhex:
1200 return 'null'
1200 return 'null'
1201 else:
1201 else:
1202 return h
1202 return h
1203
1203
1204 def printrecords(version):
1204 def printrecords(version):
1205 ui.write(('* version %s records\n') % version)
1205 ui.write(('* version %s records\n') % version)
1206 if version == 1:
1206 if version == 1:
1207 records = v1records
1207 records = v1records
1208 else:
1208 else:
1209 records = v2records
1209 records = v2records
1210
1210
1211 for rtype, record in records:
1211 for rtype, record in records:
1212 # pretty print some record types
1212 # pretty print some record types
1213 if rtype == 'L':
1213 if rtype == 'L':
1214 ui.write(('local: %s\n') % record)
1214 ui.write(('local: %s\n') % record)
1215 elif rtype == 'O':
1215 elif rtype == 'O':
1216 ui.write(('other: %s\n') % record)
1216 ui.write(('other: %s\n') % record)
1217 elif rtype == 'm':
1217 elif rtype == 'm':
1218 driver, mdstate = record.split('\0', 1)
1218 driver, mdstate = record.split('\0', 1)
1219 ui.write(('merge driver: %s (state "%s")\n')
1219 ui.write(('merge driver: %s (state "%s")\n')
1220 % (driver, mdstate))
1220 % (driver, mdstate))
1221 elif rtype in 'FDC':
1221 elif rtype in 'FDC':
1222 r = record.split('\0')
1222 r = record.split('\0')
1223 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1223 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1224 if version == 1:
1224 if version == 1:
1225 onode = 'not stored in v1 format'
1225 onode = 'not stored in v1 format'
1226 flags = r[7]
1226 flags = r[7]
1227 else:
1227 else:
1228 onode, flags = r[7:9]
1228 onode, flags = r[7:9]
1229 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1229 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1230 % (f, rtype, state, _hashornull(hash)))
1230 % (f, rtype, state, _hashornull(hash)))
1231 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1231 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1232 ui.write((' ancestor path: %s (node %s)\n')
1232 ui.write((' ancestor path: %s (node %s)\n')
1233 % (afile, _hashornull(anode)))
1233 % (afile, _hashornull(anode)))
1234 ui.write((' other path: %s (node %s)\n')
1234 ui.write((' other path: %s (node %s)\n')
1235 % (ofile, _hashornull(onode)))
1235 % (ofile, _hashornull(onode)))
1236 elif rtype == 'f':
1236 elif rtype == 'f':
1237 filename, rawextras = record.split('\0', 1)
1237 filename, rawextras = record.split('\0', 1)
1238 extras = rawextras.split('\0')
1238 extras = rawextras.split('\0')
1239 i = 0
1239 i = 0
1240 extrastrings = []
1240 extrastrings = []
1241 while i < len(extras):
1241 while i < len(extras):
1242 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1242 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1243 i += 2
1243 i += 2
1244
1244
1245 ui.write(('file extras: %s (%s)\n')
1245 ui.write(('file extras: %s (%s)\n')
1246 % (filename, ', '.join(extrastrings)))
1246 % (filename, ', '.join(extrastrings)))
1247 elif rtype == 'l':
1247 elif rtype == 'l':
1248 labels = record.split('\0', 2)
1248 labels = record.split('\0', 2)
1249 labels = [l for l in labels if len(l) > 0]
1249 labels = [l for l in labels if len(l) > 0]
1250 ui.write(('labels:\n'))
1250 ui.write(('labels:\n'))
1251 ui.write((' local: %s\n' % labels[0]))
1251 ui.write((' local: %s\n' % labels[0]))
1252 ui.write((' other: %s\n' % labels[1]))
1252 ui.write((' other: %s\n' % labels[1]))
1253 if len(labels) > 2:
1253 if len(labels) > 2:
1254 ui.write((' base: %s\n' % labels[2]))
1254 ui.write((' base: %s\n' % labels[2]))
1255 else:
1255 else:
1256 ui.write(('unrecognized entry: %s\t%s\n')
1256 ui.write(('unrecognized entry: %s\t%s\n')
1257 % (rtype, record.replace('\0', '\t')))
1257 % (rtype, record.replace('\0', '\t')))
1258
1258
1259 # Avoid mergestate.read() since it may raise an exception for unsupported
1259 # Avoid mergestate.read() since it may raise an exception for unsupported
1260 # merge state records. We shouldn't be doing this, but this is OK since this
1260 # merge state records. We shouldn't be doing this, but this is OK since this
1261 # command is pretty low-level.
1261 # command is pretty low-level.
1262 ms = mergemod.mergestate(repo)
1262 ms = mergemod.mergestate(repo)
1263
1263
1264 # sort so that reasonable information is on top
1264 # sort so that reasonable information is on top
1265 v1records = ms._readrecordsv1()
1265 v1records = ms._readrecordsv1()
1266 v2records = ms._readrecordsv2()
1266 v2records = ms._readrecordsv2()
1267 order = 'LOml'
1267 order = 'LOml'
1268 def key(r):
1268 def key(r):
1269 idx = order.find(r[0])
1269 idx = order.find(r[0])
1270 if idx == -1:
1270 if idx == -1:
1271 return (1, r[1])
1271 return (1, r[1])
1272 else:
1272 else:
1273 return (0, idx)
1273 return (0, idx)
1274 v1records.sort(key=key)
1274 v1records.sort(key=key)
1275 v2records.sort(key=key)
1275 v2records.sort(key=key)
1276
1276
1277 if not v1records and not v2records:
1277 if not v1records and not v2records:
1278 ui.write(('no merge state found\n'))
1278 ui.write(('no merge state found\n'))
1279 elif not v2records:
1279 elif not v2records:
1280 ui.note(('no version 2 merge state\n'))
1280 ui.note(('no version 2 merge state\n'))
1281 printrecords(1)
1281 printrecords(1)
1282 elif ms._v1v2match(v1records, v2records):
1282 elif ms._v1v2match(v1records, v2records):
1283 ui.note(('v1 and v2 states match: using v2\n'))
1283 ui.note(('v1 and v2 states match: using v2\n'))
1284 printrecords(2)
1284 printrecords(2)
1285 else:
1285 else:
1286 ui.note(('v1 and v2 states mismatch: using v1\n'))
1286 ui.note(('v1 and v2 states mismatch: using v1\n'))
1287 printrecords(1)
1287 printrecords(1)
1288 if ui.verbose:
1288 if ui.verbose:
1289 printrecords(2)
1289 printrecords(2)
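# Illustration (not Mercurial code): the record ordering used above. 'L',
# 'O', 'm' and 'l' records are pinned to the top, in that order, and all
# remaining records are sorted by their payload.
def _recordkey_sketch(record, order='LOml'):
    rtype, payload = record
    idx = order.find(rtype)
    if idx == -1:
        return (1, payload)
    return (0, idx)

# Example: sorted([('F', 'a.txt\x00...'), ('O', 'node2'), ('L', 'node1')],
# key=_recordkey_sketch) puts the 'L' record first, then 'O', then 'F'.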
1290
1290
1291 @command('debugnamecomplete', [], _('NAME...'))
1291 @command('debugnamecomplete', [], _('NAME...'))
1292 def debugnamecomplete(ui, repo, *args):
1292 def debugnamecomplete(ui, repo, *args):
1293 '''complete "names" - tags, open branch names, bookmark names'''
1293 '''complete "names" - tags, open branch names, bookmark names'''
1294
1294
1295 names = set()
1295 names = set()
1296 # since we previously only listed open branches, we will handle that
1296 # since we previously only listed open branches, we will handle that
1297 # specially (after this for loop)
1297 # specially (after this for loop)
1298 for name, ns in repo.names.iteritems():
1298 for name, ns in repo.names.iteritems():
1299 if name != 'branches':
1299 if name != 'branches':
1300 names.update(ns.listnames(repo))
1300 names.update(ns.listnames(repo))
1301 names.update(tag for (tag, heads, tip, closed)
1301 names.update(tag for (tag, heads, tip, closed)
1302 in repo.branchmap().iterbranches() if not closed)
1302 in repo.branchmap().iterbranches() if not closed)
1303 completions = set()
1303 completions = set()
1304 if not args:
1304 if not args:
1305 args = ['']
1305 args = ['']
1306 for a in args:
1306 for a in args:
1307 completions.update(n for n in names if n.startswith(a))
1307 completions.update(n for n in names if n.startswith(a))
1308 ui.write('\n'.join(sorted(completions)))
1308 ui.write('\n'.join(sorted(completions)))
1309 ui.write('\n')
1309 ui.write('\n')
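# Illustration (not Mercurial code): the prefix completion performed above,
# over a plain set of names instead of the repository's name registry.
def _namecomplete_sketch(names, args):
    if not args:
        args = ['']
    completions = set()
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    return sorted(completions)

# Example: _namecomplete_sketch({'default', 'stable', 'tip'}, ['s']) == ['stable']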
1310
1310
1311 @command('debugobsolete',
1311 @command('debugobsolete',
1312 [('', 'flags', 0, _('markers flag')),
1312 [('', 'flags', 0, _('markers flag')),
1313 ('', 'record-parents', False,
1313 ('', 'record-parents', False,
1314 _('record parent information for the precursor')),
1314 _('record parent information for the precursor')),
1315 ('r', 'rev', [], _('display markers relevant to REV')),
1315 ('r', 'rev', [], _('display markers relevant to REV')),
1316 ('', 'exclusive', False, _('restrict display to markers only '
1317 'relevant to REV')),
1316 ('', 'index', False, _('display index of the marker')),
1318 ('', 'index', False, _('display index of the marker')),
1317 ('', 'delete', [], _('delete markers specified by indices')),
1319 ('', 'delete', [], _('delete markers specified by indices')),
1318 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1320 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1319 _('[OBSOLETED [REPLACEMENT ...]]'))
1321 _('[OBSOLETED [REPLACEMENT ...]]'))
1320 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1322 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1321 """create arbitrary obsolete marker
1323 """create arbitrary obsolete marker
1322
1324
1323 With no arguments, displays the list of obsolescence markers."""
1325 With no arguments, displays the list of obsolescence markers."""
1324
1326
1325 def parsenodeid(s):
1327 def parsenodeid(s):
1326 try:
1328 try:
1327 # We do not use revsingle/revrange functions here to accept
1329 # We do not use revsingle/revrange functions here to accept
1328 # arbitrary node identifiers, possibly not present in the
1330 # arbitrary node identifiers, possibly not present in the
1329 # local repository.
1331 # local repository.
1330 n = bin(s)
1332 n = bin(s)
1331 if len(n) != len(nullid):
1333 if len(n) != len(nullid):
1332 raise TypeError()
1334 raise TypeError()
1333 return n
1335 return n
1334 except TypeError:
1336 except TypeError:
1335 raise error.Abort('changeset references must be full hexadecimal '
1337 raise error.Abort('changeset references must be full hexadecimal '
1336 'node identifiers')
1338 'node identifiers')
1337
1339
1338 if opts.get('delete'):
1340 if opts.get('delete'):
1339 indices = []
1341 indices = []
1340 for v in opts.get('delete'):
1342 for v in opts.get('delete'):
1341 try:
1343 try:
1342 indices.append(int(v))
1344 indices.append(int(v))
1343 except ValueError:
1345 except ValueError:
1344 raise error.Abort(_('invalid index value: %r') % v,
1346 raise error.Abort(_('invalid index value: %r') % v,
1345 hint=_('use integers for indices'))
1347 hint=_('use integers for indices'))
1346
1348
1347 if repo.currenttransaction():
1349 if repo.currenttransaction():
1348 raise error.Abort(_('cannot delete obsmarkers in the middle '
1350 raise error.Abort(_('cannot delete obsmarkers in the middle '
1349 'of a transaction.'))
1351 'of a transaction.'))
1350
1352
1351 with repo.lock():
1353 with repo.lock():
1352 n = repair.deleteobsmarkers(repo.obsstore, indices)
1354 n = repair.deleteobsmarkers(repo.obsstore, indices)
1353 ui.write(_('deleted %i obsolescence markers\n') % n)
1355 ui.write(_('deleted %i obsolescence markers\n') % n)
1354
1356
1355 return
1357 return
1356
1358
1357 if precursor is not None:
1359 if precursor is not None:
1358 if opts['rev']:
1360 if opts['rev']:
1359 raise error.Abort('cannot select revision when creating marker')
1361 raise error.Abort('cannot select revision when creating marker')
1360 metadata = {}
1362 metadata = {}
1361 metadata['user'] = opts['user'] or ui.username()
1363 metadata['user'] = opts['user'] or ui.username()
1362 succs = tuple(parsenodeid(succ) for succ in successors)
1364 succs = tuple(parsenodeid(succ) for succ in successors)
1363 l = repo.lock()
1365 l = repo.lock()
1364 try:
1366 try:
1365 tr = repo.transaction('debugobsolete')
1367 tr = repo.transaction('debugobsolete')
1366 try:
1368 try:
1367 date = opts.get('date')
1369 date = opts.get('date')
1368 if date:
1370 if date:
1369 date = util.parsedate(date)
1371 date = util.parsedate(date)
1370 else:
1372 else:
1371 date = None
1373 date = None
1372 prec = parsenodeid(precursor)
1374 prec = parsenodeid(precursor)
1373 parents = None
1375 parents = None
1374 if opts['record_parents']:
1376 if opts['record_parents']:
1375 if prec not in repo.unfiltered():
1377 if prec not in repo.unfiltered():
1376 raise error.Abort('cannot use --record-parents on '
1378 raise error.Abort('cannot use --record-parents on '
1377 'unknown changesets')
1379 'unknown changesets')
1378 parents = repo.unfiltered()[prec].parents()
1380 parents = repo.unfiltered()[prec].parents()
1379 parents = tuple(p.node() for p in parents)
1381 parents = tuple(p.node() for p in parents)
1380 repo.obsstore.create(tr, prec, succs, opts['flags'],
1382 repo.obsstore.create(tr, prec, succs, opts['flags'],
1381 parents=parents, date=date,
1383 parents=parents, date=date,
1382 metadata=metadata, ui=ui)
1384 metadata=metadata, ui=ui)
1383 tr.close()
1385 tr.close()
1384 except ValueError as exc:
1386 except ValueError as exc:
1385 raise error.Abort(_('bad obsmarker input: %s') % exc)
1387 raise error.Abort(_('bad obsmarker input: %s') % exc)
1386 finally:
1388 finally:
1387 tr.release()
1389 tr.release()
1388 finally:
1390 finally:
1389 l.release()
1391 l.release()
1390 else:
1392 else:
1391 if opts['rev']:
1393 if opts['rev']:
1392 revs = scmutil.revrange(repo, opts['rev'])
1394 revs = scmutil.revrange(repo, opts['rev'])
1393 nodes = [repo[r].node() for r in revs]
1395 nodes = [repo[r].node() for r in revs]
1394 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1396 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1397 exclusive=opts['exclusive']))
1395 markers.sort(key=lambda x: x._data)
1398 markers.sort(key=lambda x: x._data)
1396 else:
1399 else:
1397 markers = obsolete.getmarkers(repo)
1400 markers = obsolete.getmarkers(repo)
1398
1401
1399 markerstoiter = markers
1402 markerstoiter = markers
1400 isrelevant = lambda m: True
1403 isrelevant = lambda m: True
1401 if opts.get('rev') and opts.get('index'):
1404 if opts.get('rev') and opts.get('index'):
1402 markerstoiter = obsolete.getmarkers(repo)
1405 markerstoiter = obsolete.getmarkers(repo)
1403 markerset = set(markers)
1406 markerset = set(markers)
1404 isrelevant = lambda m: m in markerset
1407 isrelevant = lambda m: m in markerset
1405
1408
1406 fm = ui.formatter('debugobsolete', opts)
1409 fm = ui.formatter('debugobsolete', opts)
1407 for i, m in enumerate(markerstoiter):
1410 for i, m in enumerate(markerstoiter):
1408 if not isrelevant(m):
1411 if not isrelevant(m):
1409 # marker can be irrelevant when we're iterating over a set
1412 # marker can be irrelevant when we're iterating over a set
1410 # of markers (markerstoiter) which is bigger than the set
1413 # of markers (markerstoiter) which is bigger than the set
1411 # of markers we want to display (markers)
1414 # of markers we want to display (markers)
1412 # this can happen if both --index and --rev options are
1415 # this can happen if both --index and --rev options are
1413 # provided and thus we need to iterate over all of the markers
1416 # provided and thus we need to iterate over all of the markers
1414 # to get the correct indices, but only display the ones that
1417 # to get the correct indices, but only display the ones that
1415 # are relevant to --rev value
1418 # are relevant to --rev value
1416 continue
1419 continue
1417 fm.startitem()
1420 fm.startitem()
1418 ind = i if opts.get('index') else None
1421 ind = i if opts.get('index') else None
1419 cmdutil.showmarker(fm, m, index=ind)
1422 cmdutil.showmarker(fm, m, index=ind)
1420 fm.end()
1423 fm.end()
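# Illustration (not Mercurial code): the --rev/--index interplay handled
# above. To keep indices stable, every marker is enumerated, but only the
# markers relevant to the requested revisions are displayed.
def _indexedmarkers_sketch(allmarkers, relevant):
    relevantset = set(relevant)
    return [(i, m) for i, m in enumerate(allmarkers) if m in relevantset]

# Example: _indexedmarkers_sketch(['m0', 'm1', 'm2'], ['m2']) == [(2, 'm2')]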
1421
1424
1422 @command('debugpathcomplete',
1425 @command('debugpathcomplete',
1423 [('f', 'full', None, _('complete an entire path')),
1426 [('f', 'full', None, _('complete an entire path')),
1424 ('n', 'normal', None, _('show only normal files')),
1427 ('n', 'normal', None, _('show only normal files')),
1425 ('a', 'added', None, _('show only added files')),
1428 ('a', 'added', None, _('show only added files')),
1426 ('r', 'removed', None, _('show only removed files'))],
1429 ('r', 'removed', None, _('show only removed files'))],
1427 _('FILESPEC...'))
1430 _('FILESPEC...'))
1428 def debugpathcomplete(ui, repo, *specs, **opts):
1431 def debugpathcomplete(ui, repo, *specs, **opts):
1429 '''complete part or all of a tracked path
1432 '''complete part or all of a tracked path
1430
1433
1431 This command supports shells that offer path name completion. It
1434 This command supports shells that offer path name completion. It
1432 currently completes only files already known to the dirstate.
1435 currently completes only files already known to the dirstate.
1433
1436
1434 Completion extends only to the next path segment unless
1437 Completion extends only to the next path segment unless
1435 --full is specified, in which case entire paths are used.'''
1438 --full is specified, in which case entire paths are used.'''
1436
1439
1437 def complete(path, acceptable):
1440 def complete(path, acceptable):
1438 dirstate = repo.dirstate
1441 dirstate = repo.dirstate
1439 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1442 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1440 rootdir = repo.root + pycompat.ossep
1443 rootdir = repo.root + pycompat.ossep
1441 if spec != repo.root and not spec.startswith(rootdir):
1444 if spec != repo.root and not spec.startswith(rootdir):
1442 return [], []
1445 return [], []
1443 if os.path.isdir(spec):
1446 if os.path.isdir(spec):
1444 spec += '/'
1447 spec += '/'
1445 spec = spec[len(rootdir):]
1448 spec = spec[len(rootdir):]
1446 fixpaths = pycompat.ossep != '/'
1449 fixpaths = pycompat.ossep != '/'
1447 if fixpaths:
1450 if fixpaths:
1448 spec = spec.replace(pycompat.ossep, '/')
1451 spec = spec.replace(pycompat.ossep, '/')
1449 speclen = len(spec)
1452 speclen = len(spec)
1450 fullpaths = opts['full']
1453 fullpaths = opts['full']
1451 files, dirs = set(), set()
1454 files, dirs = set(), set()
1452 adddir, addfile = dirs.add, files.add
1455 adddir, addfile = dirs.add, files.add
1453 for f, st in dirstate.iteritems():
1456 for f, st in dirstate.iteritems():
1454 if f.startswith(spec) and st[0] in acceptable:
1457 if f.startswith(spec) and st[0] in acceptable:
1455 if fixpaths:
1458 if fixpaths:
1456 f = f.replace('/', pycompat.ossep)
1459 f = f.replace('/', pycompat.ossep)
1457 if fullpaths:
1460 if fullpaths:
1458 addfile(f)
1461 addfile(f)
1459 continue
1462 continue
1460 s = f.find(pycompat.ossep, speclen)
1463 s = f.find(pycompat.ossep, speclen)
1461 if s >= 0:
1464 if s >= 0:
1462 adddir(f[:s])
1465 adddir(f[:s])
1463 else:
1466 else:
1464 addfile(f)
1467 addfile(f)
1465 return files, dirs
1468 return files, dirs
1466
1469
1467 acceptable = ''
1470 acceptable = ''
1468 if opts['normal']:
1471 if opts['normal']:
1469 acceptable += 'nm'
1472 acceptable += 'nm'
1470 if opts['added']:
1473 if opts['added']:
1471 acceptable += 'a'
1474 acceptable += 'a'
1472 if opts['removed']:
1475 if opts['removed']:
1473 acceptable += 'r'
1476 acceptable += 'r'
1474 cwd = repo.getcwd()
1477 cwd = repo.getcwd()
1475 if not specs:
1478 if not specs:
1476 specs = ['.']
1479 specs = ['.']
1477
1480
1478 files, dirs = set(), set()
1481 files, dirs = set(), set()
1479 for spec in specs:
1482 for spec in specs:
1480 f, d = complete(spec, acceptable or 'nmar')
1483 f, d = complete(spec, acceptable or 'nmar')
1481 files.update(f)
1484 files.update(f)
1482 dirs.update(d)
1485 dirs.update(d)
1483 files.update(dirs)
1486 files.update(dirs)
1484 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1487 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1485 ui.write('\n')
1488 ui.write('\n')
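# Illustration (not Mercurial code): the "next path segment" behaviour of
# complete() above. With --full the whole matching path is returned;
# otherwise matches are cut at the first separator after the typed prefix.
def _segmentcomplete_sketch(tracked, spec, full=False):
    out = set()
    for f in tracked:
        if not f.startswith(spec):
            continue
        if full:
            out.add(f)
            continue
        sep = f.find('/', len(spec))
        out.add(f if sep < 0 else f[:sep])
    return sorted(out)

# Example: _segmentcomplete_sketch(['src/a.py', 'src/b/c.py'], 'src/')
# returns ['src/a.py', 'src/b'].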
1486
1489
1487 @command('debugpickmergetool',
1490 @command('debugpickmergetool',
1488 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1491 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1489 ('', 'changedelete', None, _('emulate merging change and delete')),
1492 ('', 'changedelete', None, _('emulate merging change and delete')),
1490 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1493 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1491 _('[PATTERN]...'),
1494 _('[PATTERN]...'),
1492 inferrepo=True)
1495 inferrepo=True)
1493 def debugpickmergetool(ui, repo, *pats, **opts):
1496 def debugpickmergetool(ui, repo, *pats, **opts):
1494 """examine which merge tool is chosen for specified file
1497 """examine which merge tool is chosen for specified file
1495
1498
1496 As described in :hg:`help merge-tools`, Mercurial examines
1499 As described in :hg:`help merge-tools`, Mercurial examines
1497 configurations below in this order to decide which merge tool is
1500 configurations below in this order to decide which merge tool is
1498 chosen for specified file.
1501 chosen for specified file.
1499
1502
1500 1. ``--tool`` option
1503 1. ``--tool`` option
1501 2. ``HGMERGE`` environment variable
1504 2. ``HGMERGE`` environment variable
1502 3. configurations in ``merge-patterns`` section
1505 3. configurations in ``merge-patterns`` section
1503 4. configuration of ``ui.merge``
1506 4. configuration of ``ui.merge``
1504 5. configurations in ``merge-tools`` section
1507 5. configurations in ``merge-tools`` section
1505 6. ``hgmerge`` tool (for historical reasons only)
1508 6. ``hgmerge`` tool (for historical reasons only)
1506 7. default tool for fallback (``:merge`` or ``:prompt``)
1509 7. default tool for fallback (``:merge`` or ``:prompt``)
1507
1510
1508 This command writes out examination result in the style below::
1511 This command writes out examination result in the style below::
1509
1512
1510 FILE = MERGETOOL
1513 FILE = MERGETOOL
1511
1514
1512 By default, all files known in the first parent context of the
1515 By default, all files known in the first parent context of the
1513 working directory are examined. Use file patterns and/or -I/-X
1516 working directory are examined. Use file patterns and/or -I/-X
1514 options to limit target files. -r/--rev is also useful to examine
1517 options to limit target files. -r/--rev is also useful to examine
1515 files in another context without actually updating to it.
1518 files in another context without actually updating to it.
1516
1519
1517 With --debug, this command also shows the warning messages emitted
1520 With --debug, this command also shows the warning messages emitted
1518 while matching against ``merge-patterns`` and similar configuration.
1521 while matching against ``merge-patterns`` and similar configuration.
1519 It is recommended to use this option with explicit file patterns
1522 It is recommended to use this option with explicit file patterns
1520 and/or -I/-X options, because it increases the amount of output per
1523 and/or -I/-X options, because it increases the amount of output per
1521 file according to the configurations in hgrc.
1524 file according to the configurations in hgrc.
1522
1525
1523 With -v/--verbose, this command shows configurations below at
1526 With -v/--verbose, this command shows configurations below at
1524 first (only if specified).
1527 first (only if specified).
1525
1528
1526 - ``--tool`` option
1529 - ``--tool`` option
1527 - ``HGMERGE`` environment variable
1530 - ``HGMERGE`` environment variable
1528 - configuration of ``ui.merge``
1531 - configuration of ``ui.merge``
1529
1532
1530 If a merge tool is chosen before matching against
1533 If a merge tool is chosen before matching against
1531 ``merge-patterns``, this command can't show any helpful
1534 ``merge-patterns``, this command can't show any helpful
1532 information, even with --debug. In such cases, the information
1535 information, even with --debug. In such cases, the information
1533 above is useful for understanding why a merge tool was chosen.
1536 above is useful for understanding why a merge tool was chosen.
1534 """
1537 """
1535 overrides = {}
1538 overrides = {}
1536 if opts['tool']:
1539 if opts['tool']:
1537 overrides[('ui', 'forcemerge')] = opts['tool']
1540 overrides[('ui', 'forcemerge')] = opts['tool']
1538 ui.note(('with --tool %r\n') % (opts['tool']))
1541 ui.note(('with --tool %r\n') % (opts['tool']))
1539
1542
1540 with ui.configoverride(overrides, 'debugmergepatterns'):
1543 with ui.configoverride(overrides, 'debugmergepatterns'):
1541 hgmerge = encoding.environ.get("HGMERGE")
1544 hgmerge = encoding.environ.get("HGMERGE")
1542 if hgmerge is not None:
1545 if hgmerge is not None:
1543 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1546 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1544 uimerge = ui.config("ui", "merge")
1547 uimerge = ui.config("ui", "merge")
1545 if uimerge:
1548 if uimerge:
1546 ui.note(('with ui.merge=%r\n') % (uimerge))
1549 ui.note(('with ui.merge=%r\n') % (uimerge))
1547
1550
1548 ctx = scmutil.revsingle(repo, opts.get('rev'))
1551 ctx = scmutil.revsingle(repo, opts.get('rev'))
1549 m = scmutil.match(ctx, pats, opts)
1552 m = scmutil.match(ctx, pats, opts)
1550 changedelete = opts['changedelete']
1553 changedelete = opts['changedelete']
1551 for path in ctx.walk(m):
1554 for path in ctx.walk(m):
1552 fctx = ctx[path]
1555 fctx = ctx[path]
1553 try:
1556 try:
1554 if not ui.debugflag:
1557 if not ui.debugflag:
1555 ui.pushbuffer(error=True)
1558 ui.pushbuffer(error=True)
1556 tool, toolpath = filemerge._picktool(repo, ui, path,
1559 tool, toolpath = filemerge._picktool(repo, ui, path,
1557 fctx.isbinary(),
1560 fctx.isbinary(),
1558 'l' in fctx.flags(),
1561 'l' in fctx.flags(),
1559 changedelete)
1562 changedelete)
1560 finally:
1563 finally:
1561 if not ui.debugflag:
1564 if not ui.debugflag:
1562 ui.popbuffer()
1565 ui.popbuffer()
1563 ui.write(('%s = %s\n') % (path, tool))
1566 ui.write(('%s = %s\n') % (path, tool))
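# Illustration only, not filemerge._picktool: a toy model of the precedence
# documented above, where the first configured source in the listed order
# wins and the fallback tool is used when nothing is configured.
def _toolprecedence_sketch(sources, fallback=':merge'):
    # sources: [(label, value), ...] in precedence order, e.g.
    # [('--tool', None), ('HGMERGE', 'vimdiff'), ('ui.merge', None)]
    for label, value in sources:
        if value:
            return label, value
    return 'fallback', fallback

# Example: with --tool unset and HGMERGE=vimdiff,
# _toolprecedence_sketch([('--tool', None), ('HGMERGE', 'vimdiff')])
# returns ('HGMERGE', 'vimdiff').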
1564
1567
1565 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1568 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1566 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1569 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1567 '''access the pushkey key/value protocol
1570 '''access the pushkey key/value protocol
1568
1571
1569 With two args, list the keys in the given namespace.
1572 With two args, list the keys in the given namespace.
1570
1573
1571 With five args, set a key to new if it currently is set to old.
1574 With five args, set a key to new if it currently is set to old.
1572 Reports success or failure.
1575 Reports success or failure.
1573 '''
1576 '''
1574
1577
1575 target = hg.peer(ui, {}, repopath)
1578 target = hg.peer(ui, {}, repopath)
1576 if keyinfo:
1579 if keyinfo:
1577 key, old, new = keyinfo
1580 key, old, new = keyinfo
1578 r = target.pushkey(namespace, key, old, new)
1581 r = target.pushkey(namespace, key, old, new)
1579 ui.status(str(r) + '\n')
1582 ui.status(str(r) + '\n')
1580 return not r
1583 return not r
1581 else:
1584 else:
1582 for k, v in sorted(target.listkeys(namespace).iteritems()):
1585 for k, v in sorted(target.listkeys(namespace).iteritems()):
1583 ui.write("%s\t%s\n" % (util.escapestr(k),
1586 ui.write("%s\t%s\n" % (util.escapestr(k),
1584 util.escapestr(v)))
1587 util.escapestr(v)))
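# Illustration (not the wire protocol code): the compare-and-swap semantics
# described above ("set a key to new if it currently is set to old"),
# modelled with a plain dict standing in for a pushkey namespace.
def _pushkey_sketch(namespace, key, old, new):
    if namespace.get(key, '') != old:
        return False
    namespace[key] = new
    return True

# Example:
#   ns = {}
#   _pushkey_sketch(ns, 'mykey', '', 'newvalue')    # True, ns['mykey'] == 'newvalue'
#   _pushkey_sketch(ns, 'mykey', 'stale', 'other')  # False, ns unchanged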
1585
1588
1586 @command('debugpvec', [], _('A B'))
1589 @command('debugpvec', [], _('A B'))
1587 def debugpvec(ui, repo, a, b=None):
1590 def debugpvec(ui, repo, a, b=None):
1588 ca = scmutil.revsingle(repo, a)
1591 ca = scmutil.revsingle(repo, a)
1589 cb = scmutil.revsingle(repo, b)
1592 cb = scmutil.revsingle(repo, b)
1590 pa = pvec.ctxpvec(ca)
1593 pa = pvec.ctxpvec(ca)
1591 pb = pvec.ctxpvec(cb)
1594 pb = pvec.ctxpvec(cb)
1592 if pa == pb:
1595 if pa == pb:
1593 rel = "="
1596 rel = "="
1594 elif pa > pb:
1597 elif pa > pb:
1595 rel = ">"
1598 rel = ">"
1596 elif pa < pb:
1599 elif pa < pb:
1597 rel = "<"
1600 rel = "<"
1598 elif pa | pb:
1601 elif pa | pb:
1599 rel = "|"
1602 rel = "|"
1600 ui.write(_("a: %s\n") % pa)
1603 ui.write(_("a: %s\n") % pa)
1601 ui.write(_("b: %s\n") % pb)
1604 ui.write(_("b: %s\n") % pb)
1602 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1605 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1603 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1606 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1604 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1607 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1605 pa.distance(pb), rel))
1608 pa.distance(pb), rel))
1606
1609
1607 @command('debugrebuilddirstate|debugrebuildstate',
1610 @command('debugrebuilddirstate|debugrebuildstate',
1608 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1611 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1609 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1612 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1610 'the working copy parent')),
1613 'the working copy parent')),
1611 ],
1614 ],
1612 _('[-r REV]'))
1615 _('[-r REV]'))
1613 def debugrebuilddirstate(ui, repo, rev, **opts):
1616 def debugrebuilddirstate(ui, repo, rev, **opts):
1614 """rebuild the dirstate as it would look like for the given revision
1617 """rebuild the dirstate as it would look like for the given revision
1615
1618
1616 If no revision is specified, the first current parent will be used.
1619 If no revision is specified, the first current parent will be used.
1617
1620
1618 The dirstate will be set to the files of the given revision.
1621 The dirstate will be set to the files of the given revision.
1619 The actual working directory content or existing dirstate
1622 The actual working directory content or existing dirstate
1620 information such as adds or removes is not considered.
1623 information such as adds or removes is not considered.
1621
1624
1622 ``minimal`` will only rebuild the dirstate status for files that claim to be
1625 ``minimal`` will only rebuild the dirstate status for files that claim to be
1623 tracked but are not in the parent manifest, or that exist in the parent
1626 tracked but are not in the parent manifest, or that exist in the parent
1624 manifest but are not in the dirstate. It will not change adds, removes, or
1627 manifest but are not in the dirstate. It will not change adds, removes, or
1625 modified files that are in the working copy parent.
1628 modified files that are in the working copy parent.
1626
1629
1627 One use of this command is to make the next :hg:`status` invocation
1630 One use of this command is to make the next :hg:`status` invocation
1628 check the actual file content.
1631 check the actual file content.
1629 """
1632 """
1630 ctx = scmutil.revsingle(repo, rev)
1633 ctx = scmutil.revsingle(repo, rev)
1631 with repo.wlock():
1634 with repo.wlock():
1632 dirstate = repo.dirstate
1635 dirstate = repo.dirstate
1633 changedfiles = None
1636 changedfiles = None
1634 # See command doc for what minimal does.
1637 # See command doc for what minimal does.
1635 if opts.get('minimal'):
1638 if opts.get('minimal'):
1636 manifestfiles = set(ctx.manifest().keys())
1639 manifestfiles = set(ctx.manifest().keys())
1637 dirstatefiles = set(dirstate)
1640 dirstatefiles = set(dirstate)
1638 manifestonly = manifestfiles - dirstatefiles
1641 manifestonly = manifestfiles - dirstatefiles
1639 dsonly = dirstatefiles - manifestfiles
1642 dsonly = dirstatefiles - manifestfiles
1640 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1643 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1641 changedfiles = manifestonly | dsnotadded
1644 changedfiles = manifestonly | dsnotadded
1642
1645
1643 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1646 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1644
1647
1645 @command('debugrebuildfncache', [], '')
1648 @command('debugrebuildfncache', [], '')
1646 def debugrebuildfncache(ui, repo):
1649 def debugrebuildfncache(ui, repo):
1647 """rebuild the fncache file"""
1650 """rebuild the fncache file"""
1648 repair.rebuildfncache(ui, repo)
1651 repair.rebuildfncache(ui, repo)
1649
1652
1650 @command('debugrename',
1653 @command('debugrename',
1651 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1654 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1652 _('[-r REV] FILE'))
1655 _('[-r REV] FILE'))
1653 def debugrename(ui, repo, file1, *pats, **opts):
1656 def debugrename(ui, repo, file1, *pats, **opts):
1654 """dump rename information"""
1657 """dump rename information"""
1655
1658
1656 ctx = scmutil.revsingle(repo, opts.get('rev'))
1659 ctx = scmutil.revsingle(repo, opts.get('rev'))
1657 m = scmutil.match(ctx, (file1,) + pats, opts)
1660 m = scmutil.match(ctx, (file1,) + pats, opts)
1658 for abs in ctx.walk(m):
1661 for abs in ctx.walk(m):
1659 fctx = ctx[abs]
1662 fctx = ctx[abs]
1660 o = fctx.filelog().renamed(fctx.filenode())
1663 o = fctx.filelog().renamed(fctx.filenode())
1661 rel = m.rel(abs)
1664 rel = m.rel(abs)
1662 if o:
1665 if o:
1663 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1666 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1664 else:
1667 else:
1665 ui.write(_("%s not renamed\n") % rel)
1668 ui.write(_("%s not renamed\n") % rel)
1666
1669
1667 @command('debugrevlog', cmdutil.debugrevlogopts +
1670 @command('debugrevlog', cmdutil.debugrevlogopts +
1668 [('d', 'dump', False, _('dump index data'))],
1671 [('d', 'dump', False, _('dump index data'))],
1669 _('-c|-m|FILE'),
1672 _('-c|-m|FILE'),
1670 optionalrepo=True)
1673 optionalrepo=True)
1671 def debugrevlog(ui, repo, file_=None, **opts):
1674 def debugrevlog(ui, repo, file_=None, **opts):
1672 """show data and statistics about a revlog"""
1675 """show data and statistics about a revlog"""
1673 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1676 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1674
1677
1675 if opts.get("dump"):
1678 if opts.get("dump"):
1676 numrevs = len(r)
1679 numrevs = len(r)
1677 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1680 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1678 " rawsize totalsize compression heads chainlen\n"))
1681 " rawsize totalsize compression heads chainlen\n"))
1679 ts = 0
1682 ts = 0
1680 heads = set()
1683 heads = set()
1681
1684
1682 for rev in xrange(numrevs):
1685 for rev in xrange(numrevs):
1683 dbase = r.deltaparent(rev)
1686 dbase = r.deltaparent(rev)
1684 if dbase == -1:
1687 if dbase == -1:
1685 dbase = rev
1688 dbase = rev
1686 cbase = r.chainbase(rev)
1689 cbase = r.chainbase(rev)
1687 clen = r.chainlen(rev)
1690 clen = r.chainlen(rev)
1688 p1, p2 = r.parentrevs(rev)
1691 p1, p2 = r.parentrevs(rev)
1689 rs = r.rawsize(rev)
1692 rs = r.rawsize(rev)
1690 ts = ts + rs
1693 ts = ts + rs
1691 heads -= set(r.parentrevs(rev))
1694 heads -= set(r.parentrevs(rev))
1692 heads.add(rev)
1695 heads.add(rev)
1693 try:
1696 try:
1694 compression = ts / r.end(rev)
1697 compression = ts / r.end(rev)
1695 except ZeroDivisionError:
1698 except ZeroDivisionError:
1696 compression = 0
1699 compression = 0
1697 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1700 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1698 "%11d %5d %8d\n" %
1701 "%11d %5d %8d\n" %
1699 (rev, p1, p2, r.start(rev), r.end(rev),
1702 (rev, p1, p2, r.start(rev), r.end(rev),
1700 r.start(dbase), r.start(cbase),
1703 r.start(dbase), r.start(cbase),
1701 r.start(p1), r.start(p2),
1704 r.start(p1), r.start(p2),
1702 rs, ts, compression, len(heads), clen))
1705 rs, ts, compression, len(heads), clen))
1703 return 0
1706 return 0
1704
1707
1705 v = r.version
1708 v = r.version
1706 format = v & 0xFFFF
1709 format = v & 0xFFFF
1707 flags = []
1710 flags = []
1708 gdelta = False
1711 gdelta = False
1709 if v & revlog.FLAG_INLINE_DATA:
1712 if v & revlog.FLAG_INLINE_DATA:
1710 flags.append('inline')
1713 flags.append('inline')
1711 if v & revlog.FLAG_GENERALDELTA:
1714 if v & revlog.FLAG_GENERALDELTA:
1712 gdelta = True
1715 gdelta = True
1713 flags.append('generaldelta')
1716 flags.append('generaldelta')
1714 if not flags:
1717 if not flags:
1715 flags = ['(none)']
1718 flags = ['(none)']
1716
1719
1717 nummerges = 0
1720 nummerges = 0
1718 numfull = 0
1721 numfull = 0
1719 numprev = 0
1722 numprev = 0
1720 nump1 = 0
1723 nump1 = 0
1721 nump2 = 0
1724 nump2 = 0
1722 numother = 0
1725 numother = 0
1723 nump1prev = 0
1726 nump1prev = 0
1724 nump2prev = 0
1727 nump2prev = 0
1725 chainlengths = []
1728 chainlengths = []
1726
1729
1727 datasize = [None, 0, 0]
1730 datasize = [None, 0, 0]
1728 fullsize = [None, 0, 0]
1731 fullsize = [None, 0, 0]
1729 deltasize = [None, 0, 0]
1732 deltasize = [None, 0, 0]
1730 chunktypecounts = {}
1733 chunktypecounts = {}
1731 chunktypesizes = {}
1734 chunktypesizes = {}
1732
1735
1733 def addsize(size, l):
1736 def addsize(size, l):
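# (comment added for clarity) l is a running [min, max, total] triple,
# seeded as [None, 0, 0] by the statistics code below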
1734 if l[0] is None or size < l[0]:
1737 if l[0] is None or size < l[0]:
1735 l[0] = size
1738 l[0] = size
1736 if size > l[1]:
1739 if size > l[1]:
1737 l[1] = size
1740 l[1] = size
1738 l[2] += size
1741 l[2] += size
1739
1742
1740 numrevs = len(r)
1743 numrevs = len(r)
1741 for rev in xrange(numrevs):
1744 for rev in xrange(numrevs):
1742 p1, p2 = r.parentrevs(rev)
1745 p1, p2 = r.parentrevs(rev)
1743 delta = r.deltaparent(rev)
1746 delta = r.deltaparent(rev)
1744 if format > 0:
1747 if format > 0:
1745 addsize(r.rawsize(rev), datasize)
1748 addsize(r.rawsize(rev), datasize)
1746 if p2 != nullrev:
1749 if p2 != nullrev:
1747 nummerges += 1
1750 nummerges += 1
1748 size = r.length(rev)
1751 size = r.length(rev)
1749 if delta == nullrev:
1752 if delta == nullrev:
1750 chainlengths.append(0)
1753 chainlengths.append(0)
1751 numfull += 1
1754 numfull += 1
1752 addsize(size, fullsize)
1755 addsize(size, fullsize)
1753 else:
1756 else:
1754 chainlengths.append(chainlengths[delta] + 1)
1757 chainlengths.append(chainlengths[delta] + 1)
1755 addsize(size, deltasize)
1758 addsize(size, deltasize)
1756 if delta == rev - 1:
1759 if delta == rev - 1:
1757 numprev += 1
1760 numprev += 1
1758 if delta == p1:
1761 if delta == p1:
1759 nump1prev += 1
1762 nump1prev += 1
1760 elif delta == p2:
1763 elif delta == p2:
1761 nump2prev += 1
1764 nump2prev += 1
1762 elif delta == p1:
1765 elif delta == p1:
1763 nump1 += 1
1766 nump1 += 1
1764 elif delta == p2:
1767 elif delta == p2:
1765 nump2 += 1
1768 nump2 += 1
1766 elif delta != nullrev:
1769 elif delta != nullrev:
1767 numother += 1
1770 numother += 1
1768
1771
1769 # Obtain data on the raw chunks in the revlog.
1772 # Obtain data on the raw chunks in the revlog.
1770 segment = r._getsegmentforrevs(rev, rev)[1]
1773 segment = r._getsegmentforrevs(rev, rev)[1]
1771 if segment:
1774 if segment:
1772 chunktype = segment[0]
1775 chunktype = segment[0]
1773 else:
1776 else:
1774 chunktype = 'empty'
1777 chunktype = 'empty'
1775
1778
1776 if chunktype not in chunktypecounts:
1779 if chunktype not in chunktypecounts:
1777 chunktypecounts[chunktype] = 0
1780 chunktypecounts[chunktype] = 0
1778 chunktypesizes[chunktype] = 0
1781 chunktypesizes[chunktype] = 0
1779
1782
1780 chunktypecounts[chunktype] += 1
1783 chunktypecounts[chunktype] += 1
1781 chunktypesizes[chunktype] += size
1784 chunktypesizes[chunktype] += size
1782
1785
1783 # Adjust size min value for empty cases
1786 # Adjust size min value for empty cases
1784 for size in (datasize, fullsize, deltasize):
1787 for size in (datasize, fullsize, deltasize):
1785 if size[0] is None:
1788 if size[0] is None:
1786 size[0] = 0
1789 size[0] = 0
1787
1790
1788 numdeltas = numrevs - numfull
1791 numdeltas = numrevs - numfull
1789 numoprev = numprev - nump1prev - nump2prev
1792 numoprev = numprev - nump1prev - nump2prev
1790 totalrawsize = datasize[2]
1793 totalrawsize = datasize[2]
1791 datasize[2] /= numrevs
1794 datasize[2] /= numrevs
1792 fulltotal = fullsize[2]
1795 fulltotal = fullsize[2]
1793 fullsize[2] /= numfull
1796 fullsize[2] /= numfull
1794 deltatotal = deltasize[2]
1797 deltatotal = deltasize[2]
1795 if numrevs - numfull > 0:
1798 if numrevs - numfull > 0:
1796 deltasize[2] /= numrevs - numfull
1799 deltasize[2] /= numrevs - numfull
1797 totalsize = fulltotal + deltatotal
1800 totalsize = fulltotal + deltatotal
1798 avgchainlen = sum(chainlengths) / numrevs
1801 avgchainlen = sum(chainlengths) / numrevs
1799 maxchainlen = max(chainlengths)
1802 maxchainlen = max(chainlengths)
1800 compratio = 1
1803 compratio = 1
1801 if totalsize:
1804 if totalsize:
1802 compratio = totalrawsize / totalsize
1805 compratio = totalrawsize / totalsize
1803
1806
1804 basedfmtstr = '%%%dd\n'
1807 basedfmtstr = '%%%dd\n'
1805 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1808 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1806
1809
1807 def dfmtstr(max):
1810 def dfmtstr(max):
1808 return basedfmtstr % len(str(max))
1811 return basedfmtstr % len(str(max))
1809 def pcfmtstr(max, padding=0):
1812 def pcfmtstr(max, padding=0):
1810 return basepcfmtstr % (len(str(max)), ' ' * padding)
1813 return basepcfmtstr % (len(str(max)), ' ' * padding)
1811
1814
1812 def pcfmt(value, total):
1815 def pcfmt(value, total):
1813 if total:
1816 if total:
1814 return (value, 100 * float(value) / total)
1817 return (value, 100 * float(value) / total)
1815 else:
1818 else:
1816 return value, 100.0
1819 return value, 100.0
1817
1820
1818 ui.write(('format : %d\n') % format)
1821 ui.write(('format : %d\n') % format)
1819 ui.write(('flags : %s\n') % ', '.join(flags))
1822 ui.write(('flags : %s\n') % ', '.join(flags))
1820
1823
1821 ui.write('\n')
1824 ui.write('\n')
1822 fmt = pcfmtstr(totalsize)
1825 fmt = pcfmtstr(totalsize)
1823 fmt2 = dfmtstr(totalsize)
1826 fmt2 = dfmtstr(totalsize)
1824 ui.write(('revisions : ') + fmt2 % numrevs)
1827 ui.write(('revisions : ') + fmt2 % numrevs)
1825 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1828 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1826 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1829 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1827 ui.write(('revisions : ') + fmt2 % numrevs)
1830 ui.write(('revisions : ') + fmt2 % numrevs)
1828 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1831 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1829 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1832 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1830 ui.write(('revision size : ') + fmt2 % totalsize)
1833 ui.write(('revision size : ') + fmt2 % totalsize)
1831 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1834 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1832 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1835 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1833
1836
1834 def fmtchunktype(chunktype):
1837 def fmtchunktype(chunktype):
1835 if chunktype == 'empty':
1838 if chunktype == 'empty':
1836 return ' %s : ' % chunktype
1839 return ' %s : ' % chunktype
1837 elif chunktype in string.ascii_letters:
1840 elif chunktype in string.ascii_letters:
1838 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1841 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1839 else:
1842 else:
1840 return ' 0x%s : ' % hex(chunktype)
1843 return ' 0x%s : ' % hex(chunktype)
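# Note added for clarity (not part of the original change): the chunk type is
# the first byte of each stored chunk. In practice 'u' marks data kept
# uncompressed and 'x' is the leading byte of a zlib stream; an empty segment
# is reported as 'empty' above, and other leading bytes would identify other
# compression engines.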
1841
1844
1842 ui.write('\n')
1845 ui.write('\n')
1843 ui.write(('chunks : ') + fmt2 % numrevs)
1846 ui.write(('chunks : ') + fmt2 % numrevs)
1844 for chunktype in sorted(chunktypecounts):
1847 for chunktype in sorted(chunktypecounts):
1845 ui.write(fmtchunktype(chunktype))
1848 ui.write(fmtchunktype(chunktype))
1846 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1849 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1847 ui.write(('chunks size : ') + fmt2 % totalsize)
1850 ui.write(('chunks size : ') + fmt2 % totalsize)
1848 for chunktype in sorted(chunktypecounts):
1851 for chunktype in sorted(chunktypecounts):
1849 ui.write(fmtchunktype(chunktype))
1852 ui.write(fmtchunktype(chunktype))
1850 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1853 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1851
1854
1852 ui.write('\n')
1855 ui.write('\n')
1853 fmt = dfmtstr(max(avgchainlen, compratio))
1856 fmt = dfmtstr(max(avgchainlen, compratio))
1854 ui.write(('avg chain length : ') + fmt % avgchainlen)
1857 ui.write(('avg chain length : ') + fmt % avgchainlen)
1855 ui.write(('max chain length : ') + fmt % maxchainlen)
1858 ui.write(('max chain length : ') + fmt % maxchainlen)
1856 ui.write(('compression ratio : ') + fmt % compratio)
1859 ui.write(('compression ratio : ') + fmt % compratio)
1857
1860
1858 if format > 0:
1861 if format > 0:
1859 ui.write('\n')
1862 ui.write('\n')
1860 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1863 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1861 % tuple(datasize))
1864 % tuple(datasize))
1862 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1865 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1863 % tuple(fullsize))
1866 % tuple(fullsize))
1864 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1867 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1865 % tuple(deltasize))
1868 % tuple(deltasize))
1866
1869
1867 if numdeltas > 0:
1870 if numdeltas > 0:
1868 ui.write('\n')
1871 ui.write('\n')
1869 fmt = pcfmtstr(numdeltas)
1872 fmt = pcfmtstr(numdeltas)
1870 fmt2 = pcfmtstr(numdeltas, 4)
1873 fmt2 = pcfmtstr(numdeltas, 4)
1871 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1874 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1872 if numprev > 0:
1875 if numprev > 0:
1873 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1876 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1874 numprev))
1877 numprev))
1875 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1878 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1876 numprev))
1879 numprev))
1877 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1880 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1878 numprev))
1881 numprev))
1879 if gdelta:
1882 if gdelta:
1880 ui.write(('deltas against p1 : ')
1883 ui.write(('deltas against p1 : ')
1881 + fmt % pcfmt(nump1, numdeltas))
1884 + fmt % pcfmt(nump1, numdeltas))
1882 ui.write(('deltas against p2 : ')
1885 ui.write(('deltas against p2 : ')
1883 + fmt % pcfmt(nump2, numdeltas))
1886 + fmt % pcfmt(nump2, numdeltas))
1884 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1887 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1885 numdeltas))
1888 numdeltas))
1886
1889
1887 @command('debugrevspec',
1890 @command('debugrevspec',
1888 [('', 'optimize', None,
1891 [('', 'optimize', None,
1889 _('print parsed tree after optimizing (DEPRECATED)')),
1892 _('print parsed tree after optimizing (DEPRECATED)')),
1890 ('p', 'show-stage', [],
1893 ('p', 'show-stage', [],
1891 _('print parsed tree at the given stage'), _('NAME')),
1894 _('print parsed tree at the given stage'), _('NAME')),
1892 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1895 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1893 ('', 'verify-optimized', False, _('verify optimized result')),
1896 ('', 'verify-optimized', False, _('verify optimized result')),
1894 ],
1897 ],
1895 ('REVSPEC'))
1898 ('REVSPEC'))
1896 def debugrevspec(ui, repo, expr, **opts):
1899 def debugrevspec(ui, repo, expr, **opts):
1897 """parse and apply a revision specification
1900 """parse and apply a revision specification
1898
1901
1899 Use the -p/--show-stage option to print the parsed tree at the given stages.
1902 Use the -p/--show-stage option to print the parsed tree at the given stages.
1900 Use -p all to print the tree at every stage.
1903 Use -p all to print the tree at every stage.
1901
1904
1902 Use --verify-optimized to compare the optimized result with the unoptimized
1905 Use --verify-optimized to compare the optimized result with the unoptimized
1903 one. Returns 1 if the optimized result differs.
1906 one. Returns 1 if the optimized result differs.
1904 """
1907 """
1905 stages = [
1908 stages = [
1906 ('parsed', lambda tree: tree),
1909 ('parsed', lambda tree: tree),
1907 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1910 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1908 ('concatenated', revsetlang.foldconcat),
1911 ('concatenated', revsetlang.foldconcat),
1909 ('analyzed', revsetlang.analyze),
1912 ('analyzed', revsetlang.analyze),
1910 ('optimized', revsetlang.optimize),
1913 ('optimized', revsetlang.optimize),
1911 ]
1914 ]
1912 if opts['no_optimized']:
1915 if opts['no_optimized']:
1913 stages = stages[:-1]
1916 stages = stages[:-1]
1914 if opts['verify_optimized'] and opts['no_optimized']:
1917 if opts['verify_optimized'] and opts['no_optimized']:
1915 raise error.Abort(_('cannot use --verify-optimized with '
1918 raise error.Abort(_('cannot use --verify-optimized with '
1916 '--no-optimized'))
1919 '--no-optimized'))
1917 stagenames = set(n for n, f in stages)
1920 stagenames = set(n for n, f in stages)
1918
1921
1919 showalways = set()
1922 showalways = set()
1920 showchanged = set()
1923 showchanged = set()
1921 if ui.verbose and not opts['show_stage']:
1924 if ui.verbose and not opts['show_stage']:
1922 # show parsed tree by --verbose (deprecated)
1925 # show parsed tree by --verbose (deprecated)
1923 showalways.add('parsed')
1926 showalways.add('parsed')
1924 showchanged.update(['expanded', 'concatenated'])
1927 showchanged.update(['expanded', 'concatenated'])
1925 if opts['optimize']:
1928 if opts['optimize']:
1926 showalways.add('optimized')
1929 showalways.add('optimized')
1927 if opts['show_stage'] and opts['optimize']:
1930 if opts['show_stage'] and opts['optimize']:
1928 raise error.Abort(_('cannot use --optimize with --show-stage'))
1931 raise error.Abort(_('cannot use --optimize with --show-stage'))
1929 if opts['show_stage'] == ['all']:
1932 if opts['show_stage'] == ['all']:
1930 showalways.update(stagenames)
1933 showalways.update(stagenames)
1931 else:
1934 else:
1932 for n in opts['show_stage']:
1935 for n in opts['show_stage']:
1933 if n not in stagenames:
1936 if n not in stagenames:
1934 raise error.Abort(_('invalid stage name: %s') % n)
1937 raise error.Abort(_('invalid stage name: %s') % n)
1935 showalways.update(opts['show_stage'])
1938 showalways.update(opts['show_stage'])
1936
1939
1937 treebystage = {}
1940 treebystage = {}
1938 printedtree = None
1941 printedtree = None
1939 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1942 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1940 for n, f in stages:
1943 for n, f in stages:
1941 treebystage[n] = tree = f(tree)
1944 treebystage[n] = tree = f(tree)
1942 if n in showalways or (n in showchanged and tree != printedtree):
1945 if n in showalways or (n in showchanged and tree != printedtree):
1943 if opts['show_stage'] or n != 'parsed':
1946 if opts['show_stage'] or n != 'parsed':
1944 ui.write(("* %s:\n") % n)
1947 ui.write(("* %s:\n") % n)
1945 ui.write(revsetlang.prettyformat(tree), "\n")
1948 ui.write(revsetlang.prettyformat(tree), "\n")
1946 printedtree = tree
1949 printedtree = tree
1947
1950
1948 if opts['verify_optimized']:
1951 if opts['verify_optimized']:
1949 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1952 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1950 brevs = revset.makematcher(treebystage['optimized'])(repo)
1953 brevs = revset.makematcher(treebystage['optimized'])(repo)
1951 if ui.verbose:
1954 if ui.verbose:
1952 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1955 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1953 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1956 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1954 arevs = list(arevs)
1957 arevs = list(arevs)
1955 brevs = list(brevs)
1958 brevs = list(brevs)
1956 if arevs == brevs:
1959 if arevs == brevs:
1957 return 0
1960 return 0
1958 ui.write(('--- analyzed\n'), label='diff.file_a')
1961 ui.write(('--- analyzed\n'), label='diff.file_a')
1959 ui.write(('+++ optimized\n'), label='diff.file_b')
1962 ui.write(('+++ optimized\n'), label='diff.file_b')
1960 sm = difflib.SequenceMatcher(None, arevs, brevs)
1963 sm = difflib.SequenceMatcher(None, arevs, brevs)
1961 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1964 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1962 if tag in ('delete', 'replace'):
1965 if tag in ('delete', 'replace'):
1963 for c in arevs[alo:ahi]:
1966 for c in arevs[alo:ahi]:
1964 ui.write('-%s\n' % c, label='diff.deleted')
1967 ui.write('-%s\n' % c, label='diff.deleted')
1965 if tag in ('insert', 'replace'):
1968 if tag in ('insert', 'replace'):
1966 for c in brevs[blo:bhi]:
1969 for c in brevs[blo:bhi]:
1967 ui.write('+%s\n' % c, label='diff.inserted')
1970 ui.write('+%s\n' % c, label='diff.inserted')
1968 if tag == 'equal':
1971 if tag == 'equal':
1969 for c in arevs[alo:ahi]:
1972 for c in arevs[alo:ahi]:
1970 ui.write(' %s\n' % c)
1973 ui.write(' %s\n' % c)
1971 return 1
1974 return 1
1972
1975
1973 func = revset.makematcher(tree)
1976 func = revset.makematcher(tree)
1974 revs = func(repo)
1977 revs = func(repo)
1975 if ui.verbose:
1978 if ui.verbose:
1976 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1979 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1977 for c in revs:
1980 for c in revs:
1978 ui.write("%s\n" % c)
1981 ui.write("%s\n" % c)
1979
1982
1980 @command('debugsetparents', [], _('REV1 [REV2]'))
1983 @command('debugsetparents', [], _('REV1 [REV2]'))
1981 def debugsetparents(ui, repo, rev1, rev2=None):
1984 def debugsetparents(ui, repo, rev1, rev2=None):
1982 """manually set the parents of the current working directory
1985 """manually set the parents of the current working directory
1983
1986
1984 This is useful for writing repository conversion tools, but should
1987 This is useful for writing repository conversion tools, but should
1985 be used with care. For example, neither the working directory nor the
1988 be used with care. For example, neither the working directory nor the
1986 dirstate is updated, so file status may be incorrect after running this
1989 dirstate is updated, so file status may be incorrect after running this
1987 command.
1990 command.
1988
1991
1989 Returns 0 on success.
1992 Returns 0 on success.
1990 """
1993 """
1991
1994
1992 r1 = scmutil.revsingle(repo, rev1).node()
1995 r1 = scmutil.revsingle(repo, rev1).node()
1993 r2 = scmutil.revsingle(repo, rev2, 'null').node()
1996 r2 = scmutil.revsingle(repo, rev2, 'null').node()
1994
1997
1995 with repo.wlock():
1998 with repo.wlock():
1996 repo.setparents(r1, r2)
1999 repo.setparents(r1, r2)
1997
2000
1998 @command('debugsub',
2001 @command('debugsub',
1999 [('r', 'rev', '',
2002 [('r', 'rev', '',
2000 _('revision to check'), _('REV'))],
2003 _('revision to check'), _('REV'))],
2001 _('[-r REV] [REV]'))
2004 _('[-r REV] [REV]'))
2002 def debugsub(ui, repo, rev=None):
2005 def debugsub(ui, repo, rev=None):
2003 ctx = scmutil.revsingle(repo, rev, None)
2006 ctx = scmutil.revsingle(repo, rev, None)
2004 for k, v in sorted(ctx.substate.items()):
2007 for k, v in sorted(ctx.substate.items()):
2005 ui.write(('path %s\n') % k)
2008 ui.write(('path %s\n') % k)
2006 ui.write((' source %s\n') % v[0])
2009 ui.write((' source %s\n') % v[0])
2007 ui.write((' revision %s\n') % v[1])
2010 ui.write((' revision %s\n') % v[1])
2008
2011
2009 @command('debugsuccessorssets',
2012 @command('debugsuccessorssets',
2010 [],
2013 [],
2011 _('[REV]'))
2014 _('[REV]'))
2012 def debugsuccessorssets(ui, repo, *revs):
2015 def debugsuccessorssets(ui, repo, *revs):
2013 """show set of successors for revision
2016 """show set of successors for revision
2014
2017
2015 A successors set of changeset A is a consistent group of revisions that
2018 A successors set of changeset A is a consistent group of revisions that
2016 succeed A. It contains non-obsolete changesets only.
2019 succeed A. It contains non-obsolete changesets only.
2017
2020
2018 In most cases a changeset A has a single successors set containing a single
2021 In most cases a changeset A has a single successors set containing a single
2019 successor (changeset A replaced by A').
2022 successor (changeset A replaced by A').
2020
2023
2021 A changeset that is made obsolete with no successors is called "pruned".
2024 A changeset that is made obsolete with no successors is called "pruned".
2022 Such changesets have no successors sets at all.
2025 Such changesets have no successors sets at all.
2023
2026
2024 A changeset that has been "split" will have a successors set containing
2027 A changeset that has been "split" will have a successors set containing
2025 more than one successor.
2028 more than one successor.
2026
2029
2027 A changeset that has been rewritten in multiple different ways is called
2030 A changeset that has been rewritten in multiple different ways is called
2028 "divergent". Such changesets have multiple successor sets (each of which
2031 "divergent". Such changesets have multiple successor sets (each of which
2029 may also be split, i.e. have multiple successors).
2032 may also be split, i.e. have multiple successors).
2030
2033
2031 Results are displayed as follows::
2034 Results are displayed as follows::
2032
2035
2033 <rev1>
2036 <rev1>
2034 <successors-1A>
2037 <successors-1A>
2035 <rev2>
2038 <rev2>
2036 <successors-2A>
2039 <successors-2A>
2037 <successors-2B1> <successors-2B2> <successors-2B3>
2040 <successors-2B1> <successors-2B2> <successors-2B3>
2038
2041
2039 Here rev2 has two possible (i.e. divergent) successors sets. The first
2042 Here rev2 has two possible (i.e. divergent) successors sets. The first
2040 holds one element, whereas the second holds three (i.e. the changeset has
2043 holds one element, whereas the second holds three (i.e. the changeset has
2041 been split).
2044 been split).
2042 """
2045 """
2043 # passed to successorssets caching computation from one call to another
2046 # passed to successorssets caching computation from one call to another
2044 cache = {}
2047 cache = {}
2045 ctx2str = str
2048 ctx2str = str
2046 node2str = short
2049 node2str = short
2047 if ui.debug():
2050 if ui.debug():
2048 def ctx2str(ctx):
2051 def ctx2str(ctx):
2049 return ctx.hex()
2052 return ctx.hex()
2050 node2str = hex
2053 node2str = hex
2051 for rev in scmutil.revrange(repo, revs):
2054 for rev in scmutil.revrange(repo, revs):
2052 ctx = repo[rev]
2055 ctx = repo[rev]
2053 ui.write('%s\n'% ctx2str(ctx))
2056 ui.write('%s\n'% ctx2str(ctx))
2054 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2057 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2055 if succsset:
2058 if succsset:
2056 ui.write(' ')
2059 ui.write(' ')
2057 ui.write(node2str(succsset[0]))
2060 ui.write(node2str(succsset[0]))
2058 for node in succsset[1:]:
2061 for node in succsset[1:]:
2059 ui.write(' ')
2062 ui.write(' ')
2060 ui.write(node2str(node))
2063 ui.write(node2str(node))
2061 ui.write('\n')
2064 ui.write('\n')
2062
2065
2063 @command('debugtemplate',
2066 @command('debugtemplate',
2064 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2067 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2065 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2068 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2066 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2069 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2067 optionalrepo=True)
2070 optionalrepo=True)
2068 def debugtemplate(ui, repo, tmpl, **opts):
2071 def debugtemplate(ui, repo, tmpl, **opts):
2069 """parse and apply a template
2072 """parse and apply a template
2070
2073
2071 If -r/--rev is given, the template is processed as a log template and
2074 If -r/--rev is given, the template is processed as a log template and
2072 applied to the given changesets. Otherwise, it is processed as a generic
2075 applied to the given changesets. Otherwise, it is processed as a generic
2073 template.
2076 template.
2074
2077
2075 Use --verbose to print the parsed tree.
2078 Use --verbose to print the parsed tree.
2076 """
2079 """
2077 revs = None
2080 revs = None
2078 if opts['rev']:
2081 if opts['rev']:
2079 if repo is None:
2082 if repo is None:
2080 raise error.RepoError(_('there is no Mercurial repository here '
2083 raise error.RepoError(_('there is no Mercurial repository here '
2081 '(.hg not found)'))
2084 '(.hg not found)'))
2082 revs = scmutil.revrange(repo, opts['rev'])
2085 revs = scmutil.revrange(repo, opts['rev'])
2083
2086
2084 props = {}
2087 props = {}
2085 for d in opts['define']:
2088 for d in opts['define']:
2086 try:
2089 try:
2087 k, v = (e.strip() for e in d.split('=', 1))
2090 k, v = (e.strip() for e in d.split('=', 1))
2088 if not k or k == 'ui':
2091 if not k or k == 'ui':
2089 raise ValueError
2092 raise ValueError
2090 props[k] = v
2093 props[k] = v
2091 except ValueError:
2094 except ValueError:
2092 raise error.Abort(_('malformed keyword definition: %s') % d)
2095 raise error.Abort(_('malformed keyword definition: %s') % d)
2093
2096
2094 if ui.verbose:
2097 if ui.verbose:
2095 aliases = ui.configitems('templatealias')
2098 aliases = ui.configitems('templatealias')
2096 tree = templater.parse(tmpl)
2099 tree = templater.parse(tmpl)
2097 ui.note(templater.prettyformat(tree), '\n')
2100 ui.note(templater.prettyformat(tree), '\n')
2098 newtree = templater.expandaliases(tree, aliases)
2101 newtree = templater.expandaliases(tree, aliases)
2099 if newtree != tree:
2102 if newtree != tree:
2100 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2103 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2101
2104
2102 mapfile = None
2105 mapfile = None
2103 if revs is None:
2106 if revs is None:
2104 k = 'debugtemplate'
2107 k = 'debugtemplate'
2105 t = formatter.maketemplater(ui, k, tmpl)
2108 t = formatter.maketemplater(ui, k, tmpl)
2106 ui.write(templater.stringify(t(k, ui=ui, **props)))
2109 ui.write(templater.stringify(t(k, ui=ui, **props)))
2107 else:
2110 else:
2108 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2111 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2109 mapfile, buffered=False)
2112 mapfile, buffered=False)
2110 for r in revs:
2113 for r in revs:
2111 displayer.show(repo[r], **props)
2114 displayer.show(repo[r], **props)
2112 displayer.close()
2115 displayer.close()
2113
2116
2114 @command('debugupdatecaches', [])
2117 @command('debugupdatecaches', [])
2115 def debugupdatecaches(ui, repo, *pats, **opts):
2118 def debugupdatecaches(ui, repo, *pats, **opts):
2116 """warm all known caches in the repository"""
2119 """warm all known caches in the repository"""
2117 with repo.wlock():
2120 with repo.wlock():
2118 with repo.lock():
2121 with repo.lock():
2119 repo.updatecaches()
2122 repo.updatecaches()
2120
2123
2121 @command('debugupgraderepo', [
2124 @command('debugupgraderepo', [
2122 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2125 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2123 ('', 'run', False, _('performs an upgrade')),
2126 ('', 'run', False, _('performs an upgrade')),
2124 ])
2127 ])
2125 def debugupgraderepo(ui, repo, run=False, optimize=None):
2128 def debugupgraderepo(ui, repo, run=False, optimize=None):
2126 """upgrade a repository to use different features
2129 """upgrade a repository to use different features
2127
2130
2128 If no arguments are specified, the repository is evaluated for upgrade
2131 If no arguments are specified, the repository is evaluated for upgrade
2129 and a list of problems and potential optimizations is printed.
2132 and a list of problems and potential optimizations is printed.
2130
2133
2131 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2134 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2132 can be influenced via additional arguments. More details will be provided
2135 can be influenced via additional arguments. More details will be provided
2133 by the command output when run without ``--run``.
2136 by the command output when run without ``--run``.
2134
2137
2135 During the upgrade, the repository will be locked and no writes will be
2138 During the upgrade, the repository will be locked and no writes will be
2136 allowed.
2139 allowed.
2137
2140
2138 At the end of the upgrade, the repository may not be readable while new
2141 At the end of the upgrade, the repository may not be readable while new
2139 repository data is swapped in. This window will be as long as it takes to
2142 repository data is swapped in. This window will be as long as it takes to
2140 rename some directories inside the ``.hg`` directory. On most machines, this
2143 rename some directories inside the ``.hg`` directory. On most machines, this
2141 should complete almost instantaneously and the chances of a consumer being
2144 should complete almost instantaneously and the chances of a consumer being
2142 unable to access the repository should be low.
2145 unable to access the repository should be low.
2143 """
2146 """
2144 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2147 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2145
2148
2146 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2149 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2147 inferrepo=True)
2150 inferrepo=True)
2148 def debugwalk(ui, repo, *pats, **opts):
2151 def debugwalk(ui, repo, *pats, **opts):
2149 """show how files match on given patterns"""
2152 """show how files match on given patterns"""
2150 m = scmutil.match(repo[None], pats, opts)
2153 m = scmutil.match(repo[None], pats, opts)
2151 ui.write(('matcher: %r\n' % m))
2154 ui.write(('matcher: %r\n' % m))
2152 items = list(repo[None].walk(m))
2155 items = list(repo[None].walk(m))
2153 if not items:
2156 if not items:
2154 return
2157 return
2155 f = lambda fn: fn
2158 f = lambda fn: fn
2156 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2159 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2157 f = lambda fn: util.normpath(fn)
2160 f = lambda fn: util.normpath(fn)
2158 fmt = 'f %%-%ds %%-%ds %%s' % (
2161 fmt = 'f %%-%ds %%-%ds %%s' % (
2159 max([len(abs) for abs in items]),
2162 max([len(abs) for abs in items]),
2160 max([len(m.rel(abs)) for abs in items]))
2163 max([len(m.rel(abs)) for abs in items]))
2161 for abs in items:
2164 for abs in items:
2162 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2165 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2163 ui.write("%s\n" % line.rstrip())
2166 ui.write("%s\n" % line.rstrip())
2164
2167
2165 @command('debugwireargs',
2168 @command('debugwireargs',
2166 [('', 'three', '', 'three'),
2169 [('', 'three', '', 'three'),
2167 ('', 'four', '', 'four'),
2170 ('', 'four', '', 'four'),
2168 ('', 'five', '', 'five'),
2171 ('', 'five', '', 'five'),
2169 ] + cmdutil.remoteopts,
2172 ] + cmdutil.remoteopts,
2170 _('REPO [OPTIONS]... [ONE [TWO]]'),
2173 _('REPO [OPTIONS]... [ONE [TWO]]'),
2171 norepo=True)
2174 norepo=True)
2172 def debugwireargs(ui, repopath, *vals, **opts):
2175 def debugwireargs(ui, repopath, *vals, **opts):
2173 repo = hg.peer(ui, opts, repopath)
2176 repo = hg.peer(ui, opts, repopath)
2174 for opt in cmdutil.remoteopts:
2177 for opt in cmdutil.remoteopts:
2175 del opts[opt[1]]
2178 del opts[opt[1]]
2176 args = {}
2179 args = {}
2177 for k, v in opts.iteritems():
2180 for k, v in opts.iteritems():
2178 if v:
2181 if v:
2179 args[k] = v
2182 args[k] = v
2180 # run twice to check that we don't mess up the stream for the next command
2183 # run twice to check that we don't mess up the stream for the next command
2181 res1 = repo.debugwireargs(*vals, **args)
2184 res1 = repo.debugwireargs(*vals, **args)
2182 res2 = repo.debugwireargs(*vals, **args)
2185 res2 = repo.debugwireargs(*vals, **args)
2183 ui.write("%s\n" % res1)
2186 ui.write("%s\n" % res1)
2184 if res1 != res2:
2187 if res1 != res2:
2185 ui.warn("%s\n" % res2)
2188 ui.warn("%s\n" % res2)
@@ -1,1301 +1,1426 b''
1 # obsolete.py - obsolete markers handling
1 # obsolete.py - obsolete markers handling
2 #
2 #
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
3 # Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
4 # Logilab SA <contact@logilab.fr>
4 # Logilab SA <contact@logilab.fr>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """Obsolete marker handling
9 """Obsolete marker handling
10
10
11 An obsolete marker maps an old changeset to a list of new
11 An obsolete marker maps an old changeset to a list of new
12 changesets. If the list of new changesets is empty, the old changeset
12 changesets. If the list of new changesets is empty, the old changeset
13 is said to be "killed". Otherwise, the old changeset is being
13 is said to be "killed". Otherwise, the old changeset is being
14 "replaced" by the new changesets.
14 "replaced" by the new changesets.
15
15
16 Obsolete markers can be used to record and distribute changeset graph
16 Obsolete markers can be used to record and distribute changeset graph
17 transformations performed by history rewrite operations, and help
17 transformations performed by history rewrite operations, and help
18 build new tools to reconcile conflicting rewrite actions. To
18 build new tools to reconcile conflicting rewrite actions. To
19 facilitate conflict resolution, markers include various annotations
19 facilitate conflict resolution, markers include various annotations
20 besides old and new changeset identifiers, such as creation date or
20 besides old and new changeset identifiers, such as creation date or
21 author name.
21 author name.
22
22
23 The old obsoleted changeset is called a "precursor" and possible
23 The old obsoleted changeset is called a "precursor" and possible
24 replacements are called "successors". Markers that used changeset X as
24 replacements are called "successors". Markers that used changeset X as
25 a precursor are called "successor markers of X" because they hold
25 a precursor are called "successor markers of X" because they hold
26 information about the successors of X. Markers that use changeset Y as
26 information about the successors of X. Markers that use changeset Y as
27 a successor are called "precursor markers of Y" because they hold
27 a successor are called "precursor markers of Y" because they hold
28 information about the precursors of Y.
28 information about the precursors of Y.
29
29
30 Examples:
30 Examples:
31
31
32 - When changeset A is replaced by changeset A', one marker is stored:
32 - When changeset A is replaced by changeset A', one marker is stored:
33
33
34 (A, (A',))
34 (A, (A',))
35
35
36 - When changesets A and B are folded into a new changeset C, two markers are
36 - When changesets A and B are folded into a new changeset C, two markers are
37 stored:
37 stored:
38
38
39 (A, (C,)) and (B, (C,))
39 (A, (C,)) and (B, (C,))
40
40
41 - When changeset A is simply "pruned" from the graph, a marker is created:
41 - When changeset A is simply "pruned" from the graph, a marker is created:
42
42
43 (A, ())
43 (A, ())
44
44
45 - When changeset A is split into B and C, a single marker is used:
45 - When changeset A is split into B and C, a single marker is used:
46
46
47 (A, (B, C))
47 (A, (B, C))
48
48
49 We use a single marker to distinguish the "split" case from the "divergence"
49 We use a single marker to distinguish the "split" case from the "divergence"
50 case. If two independent operations rewrite the same changeset A into A' and
50 case. If two independent operations rewrite the same changeset A into A' and
51 A'', we have an error case: divergent rewriting. We can detect it because
51 A'', we have an error case: divergent rewriting. We can detect it because
52 two markers will be created independently:
52 two markers will be created independently:
53
53
54 (A, (B,)) and (A, (C,))
54 (A, (B,)) and (A, (C,))
55
55
56 Format
56 Format
57 ------
57 ------
58
58
59 Markers are stored in an append-only file stored in
59 Markers are stored in an append-only file stored in
60 '.hg/store/obsstore'.
60 '.hg/store/obsstore'.
61
61
62 The file starts with a version header:
62 The file starts with a version header:
63
63
64 - 1 unsigned byte: version number, starting at zero.
64 - 1 unsigned byte: version number, starting at zero.
65
65
66 The header is followed by the markers. The marker format depends on the version. See
66 The header is followed by the markers. The marker format depends on the version. See
67 comment associated with each format for details.
67 comment associated with each format for details.
68
68
69 """
69 """
70 from __future__ import absolute_import
70 from __future__ import absolute_import
71
71
72 import errno
72 import errno
73 import struct
73 import struct
74
74
75 from .i18n import _
75 from .i18n import _
76 from . import (
76 from . import (
77 error,
77 error,
78 node,
78 node,
79 phases,
79 phases,
80 policy,
80 policy,
81 util,
81 util,
82 )
82 )
83
83
84 parsers = policy.importmod(r'parsers')
84 parsers = policy.importmod(r'parsers')
85
85
86 _pack = struct.pack
86 _pack = struct.pack
87 _unpack = struct.unpack
87 _unpack = struct.unpack
88 _calcsize = struct.calcsize
88 _calcsize = struct.calcsize
89 propertycache = util.propertycache
89 propertycache = util.propertycache
90
90
91 # the obsolete feature is not mature enough to be enabled by default.
91 # the obsolete feature is not mature enough to be enabled by default.
92 # you have to rely on a third party extension to enable this.
92 # you have to rely on a third party extension to enable this.
93 _enabled = False
93 _enabled = False
94
94
95 # Options for obsolescence
95 # Options for obsolescence
96 createmarkersopt = 'createmarkers'
96 createmarkersopt = 'createmarkers'
97 allowunstableopt = 'allowunstable'
97 allowunstableopt = 'allowunstable'
98 exchangeopt = 'exchange'
98 exchangeopt = 'exchange'
99
99
100 def isenabled(repo, option):
100 def isenabled(repo, option):
101 """Returns True if the given repository has the given obsolete option
101 """Returns True if the given repository has the given obsolete option
102 enabled.
102 enabled.
103 """
103 """
104 result = set(repo.ui.configlist('experimental', 'evolution'))
104 result = set(repo.ui.configlist('experimental', 'evolution'))
105 if 'all' in result:
105 if 'all' in result:
106 return True
106 return True
107
107
108 # For migration purposes, temporarily return true if the config hasn't been
108 # For migration purposes, temporarily return true if the config hasn't been
109 # set but _enabled is true.
109 # set but _enabled is true.
110 if len(result) == 0 and _enabled:
110 if len(result) == 0 and _enabled:
111 return True
111 return True
112
112
113 # createmarkers must be enabled if other options are enabled
113 # createmarkers must be enabled if other options are enabled
114 if ((allowunstableopt in result or exchangeopt in result) and
114 if ((allowunstableopt in result or exchangeopt in result) and
115 not createmarkersopt in result):
115 not createmarkersopt in result):
116 raise error.Abort(_("'createmarkers' obsolete option must be enabled "
116 raise error.Abort(_("'createmarkers' obsolete option must be enabled "
117 "if other obsolete options are enabled"))
117 "if other obsolete options are enabled"))
118
118
119 return option in result
119 return option in result
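As a usage sketch (the repo object is assumed to be in scope, and the configuration line is only an example), callers typically guard marker creation on this helper:

# sketch: requires something like "[experimental] evolution = createmarkers"
# in the hgrc for the check to pass
if isenabled(repo, createmarkersopt):
    pass  # safe to record new obsolescence markers here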
120
120
121 ### obsolescence marker flag
121 ### obsolescence marker flag
122
122
123 ## bumpedfix flag
123 ## bumpedfix flag
124 #
124 #
125 # When a changeset A' succeeds a changeset A which became public, we call A'
125 # When a changeset A' succeeds a changeset A which became public, we call A'
126 # "bumped" because it is a successor of a public changeset
126 # "bumped" because it is a successor of a public changeset
127 #
127 #
128 # o A' (bumped)
128 # o A' (bumped)
129 # |`:
129 # |`:
130 # | o A
130 # | o A
131 # |/
131 # |/
132 # o Z
132 # o Z
133 #
133 #
134 # The way to solve this situation is to create a new changeset Ad as a child
134 # The way to solve this situation is to create a new changeset Ad as a child
135 # of A. This changeset has the same content as A'. So the diff from A to A'
135 # of A. This changeset has the same content as A'. So the diff from A to A'
136 # is the same as the diff from A to Ad. Ad is marked as a successor of A'
136 # is the same as the diff from A to Ad. Ad is marked as a successor of A'
137 #
137 #
138 # o Ad
138 # o Ad
139 # |`:
139 # |`:
140 # | x A'
140 # | x A'
141 # |'|
141 # |'|
142 # o | A
142 # o | A
143 # |/
143 # |/
144 # o Z
144 # o Z
145 #
145 #
146 # But by transitivity Ad is also a successor of A. To avoid having Ad marked
146 # But by transitivity Ad is also a successor of A. To avoid having Ad marked
147 # as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
147 # as bumped too, we add the `bumpedfix` flag to the marker. <A', (Ad,)>.
148 # This flag means that the successors express the changes between the public and
148 # This flag means that the successors express the changes between the public and
149 # bumped version and fix the situation, breaking the transitivity of
149 # bumped version and fix the situation, breaking the transitivity of
150 # "bumped" here.
150 # "bumped" here.
151 bumpedfix = 1
151 bumpedfix = 1
152 usingsha256 = 2
152 usingsha256 = 2
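A marker's flags field is a plain bit field; as a quick sketch (the flags variable is assumed), the tests used by the encoders below look like this:

# sketch: testing marker flags
if flags & usingsha256:
    nodesize = 32  # precursor/successor identifiers are SHA-256 hashes
if flags & bumpedfix:
    pass           # this marker repairs a "bumped" changeset (see above)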
153
153
154 ## Parsing and writing of version "0"
154 ## Parsing and writing of version "0"
155 #
155 #
156 # The header is followed by the markers. Each marker is made of:
156 # The header is followed by the markers. Each marker is made of:
157 #
157 #
158 # - 1 uint8 : number of new changesets "N", can be zero.
158 # - 1 uint8 : number of new changesets "N", can be zero.
159 #
159 #
160 # - 1 uint32: metadata size "M" in bytes.
160 # - 1 uint32: metadata size "M" in bytes.
161 #
161 #
162 # - 1 byte: a bit field. It is reserved for flags used in common
162 # - 1 byte: a bit field. It is reserved for flags used in common
163 # obsolete marker operations, to avoid repeated decoding of metadata
163 # obsolete marker operations, to avoid repeated decoding of metadata
164 # entries.
164 # entries.
165 #
165 #
166 # - 20 bytes: obsoleted changeset identifier.
166 # - 20 bytes: obsoleted changeset identifier.
167 #
167 #
168 # - N*20 bytes: new changesets identifiers.
168 # - N*20 bytes: new changesets identifiers.
169 #
169 #
170 # - M bytes: metadata as a sequence of nul-terminated strings. Each
170 # - M bytes: metadata as a sequence of nul-terminated strings. Each
171 # string contains a key and a value, separated by a colon ':', without
171 # string contains a key and a value, separated by a colon ':', without
172 # additional encoding. Keys cannot contain '\0' or ':' and values
172 # additional encoding. Keys cannot contain '\0' or ':' and values
173 # cannot contain '\0'.
173 # cannot contain '\0'.
174 _fm0version = 0
174 _fm0version = 0
175 _fm0fixed = '>BIB20s'
175 _fm0fixed = '>BIB20s'
176 _fm0node = '20s'
176 _fm0node = '20s'
177 _fm0fsize = _calcsize(_fm0fixed)
177 _fm0fsize = _calcsize(_fm0fixed)
178 _fm0fnodesize = _calcsize(_fm0node)
178 _fm0fnodesize = _calcsize(_fm0node)
179
179
180 def _fm0readmarkers(data, off):
180 def _fm0readmarkers(data, off):
181 # Loop on markers
181 # Loop on markers
182 l = len(data)
182 l = len(data)
183 while off + _fm0fsize <= l:
183 while off + _fm0fsize <= l:
184 # read fixed part
184 # read fixed part
185 cur = data[off:off + _fm0fsize]
185 cur = data[off:off + _fm0fsize]
186 off += _fm0fsize
186 off += _fm0fsize
187 numsuc, mdsize, flags, pre = _unpack(_fm0fixed, cur)
187 numsuc, mdsize, flags, pre = _unpack(_fm0fixed, cur)
188 # read replacement
188 # read replacement
189 sucs = ()
189 sucs = ()
190 if numsuc:
190 if numsuc:
191 s = (_fm0fnodesize * numsuc)
191 s = (_fm0fnodesize * numsuc)
192 cur = data[off:off + s]
192 cur = data[off:off + s]
193 sucs = _unpack(_fm0node * numsuc, cur)
193 sucs = _unpack(_fm0node * numsuc, cur)
194 off += s
194 off += s
195 # read metadata
195 # read metadata
196 # (metadata will be decoded on demand)
196 # (metadata will be decoded on demand)
197 metadata = data[off:off + mdsize]
197 metadata = data[off:off + mdsize]
198 if len(metadata) != mdsize:
198 if len(metadata) != mdsize:
199 raise error.Abort(_('parsing obsolete marker: metadata is too '
199 raise error.Abort(_('parsing obsolete marker: metadata is too '
200 'short, %d bytes expected, got %d')
200 'short, %d bytes expected, got %d')
201 % (mdsize, len(metadata)))
201 % (mdsize, len(metadata)))
202 off += mdsize
202 off += mdsize
203 metadata = _fm0decodemeta(metadata)
203 metadata = _fm0decodemeta(metadata)
204 try:
204 try:
205 when, offset = metadata.pop('date', '0 0').split(' ')
205 when, offset = metadata.pop('date', '0 0').split(' ')
206 date = float(when), int(offset)
206 date = float(when), int(offset)
207 except ValueError:
207 except ValueError:
208 date = (0., 0)
208 date = (0., 0)
209 parents = None
209 parents = None
210 if 'p2' in metadata:
210 if 'p2' in metadata:
211 parents = (metadata.pop('p1', None), metadata.pop('p2', None))
211 parents = (metadata.pop('p1', None), metadata.pop('p2', None))
212 elif 'p1' in metadata:
212 elif 'p1' in metadata:
213 parents = (metadata.pop('p1', None),)
213 parents = (metadata.pop('p1', None),)
214 elif 'p0' in metadata:
214 elif 'p0' in metadata:
215 parents = ()
215 parents = ()
216 if parents is not None:
216 if parents is not None:
217 try:
217 try:
218 parents = tuple(node.bin(p) for p in parents)
218 parents = tuple(node.bin(p) for p in parents)
219 # if parent content is not a nodeid, drop the data
219 # if parent content is not a nodeid, drop the data
220 for p in parents:
220 for p in parents:
221 if len(p) != 20:
221 if len(p) != 20:
222 parents = None
222 parents = None
223 break
223 break
224 except TypeError:
224 except TypeError:
225 # if content cannot be translated to nodeid drop the data.
225 # if content cannot be translated to nodeid drop the data.
226 parents = None
226 parents = None
227
227
228 metadata = tuple(sorted(metadata.iteritems()))
228 metadata = tuple(sorted(metadata.iteritems()))
229
229
230 yield (pre, sucs, flags, metadata, date, parents)
230 yield (pre, sucs, flags, metadata, date, parents)
231
231
232 def _fm0encodeonemarker(marker):
232 def _fm0encodeonemarker(marker):
233 pre, sucs, flags, metadata, date, parents = marker
233 pre, sucs, flags, metadata, date, parents = marker
234 if flags & usingsha256:
234 if flags & usingsha256:
235 raise error.Abort(_('cannot handle sha256 with old obsstore format'))
235 raise error.Abort(_('cannot handle sha256 with old obsstore format'))
236 metadata = dict(metadata)
236 metadata = dict(metadata)
237 time, tz = date
237 time, tz = date
238 metadata['date'] = '%r %i' % (time, tz)
238 metadata['date'] = '%r %i' % (time, tz)
239 if parents is not None:
239 if parents is not None:
240 if not parents:
240 if not parents:
241 # mark that we explicitly recorded no parents
241 # mark that we explicitly recorded no parents
242 metadata['p0'] = ''
242 metadata['p0'] = ''
243 for i, p in enumerate(parents, 1):
243 for i, p in enumerate(parents, 1):
244 metadata['p%i' % i] = node.hex(p)
244 metadata['p%i' % i] = node.hex(p)
245 metadata = _fm0encodemeta(metadata)
245 metadata = _fm0encodemeta(metadata)
246 numsuc = len(sucs)
246 numsuc = len(sucs)
247 format = _fm0fixed + (_fm0node * numsuc)
247 format = _fm0fixed + (_fm0node * numsuc)
248 data = [numsuc, len(metadata), flags, pre]
248 data = [numsuc, len(metadata), flags, pre]
249 data.extend(sucs)
249 data.extend(sucs)
250 return _pack(format, *data) + metadata
250 return _pack(format, *data) + metadata
251
251
252 def _fm0encodemeta(meta):
252 def _fm0encodemeta(meta):
253 """Return encoded metadata string to string mapping.
253 """Return encoded metadata string to string mapping.
254
254
255 Assumes no ':' in keys and no '\0' in either keys or values."""
255 Assumes no ':' in keys and no '\0' in either keys or values."""
256 for key, value in meta.iteritems():
256 for key, value in meta.iteritems():
257 if ':' in key or '\0' in key:
257 if ':' in key or '\0' in key:
258 raise ValueError("':' and '\0' are forbidden in metadata key'")
258 raise ValueError("':' and '\0' are forbidden in metadata key'")
259 if '\0' in value:
259 if '\0' in value:
260 raise ValueError("':' is forbidden in metadata value'")
260 raise ValueError("':' is forbidden in metadata value'")
261 return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
261 return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
262
262
263 def _fm0decodemeta(data):
263 def _fm0decodemeta(data):
264 """Return string to string dictionary from encoded version."""
264 """Return string to string dictionary from encoded version."""
265 d = {}
265 d = {}
266 for l in data.split('\0'):
266 for l in data.split('\0'):
267 if l:
267 if l:
268 key, value = l.split(':')
268 key, value = l.split(':')
269 d[key] = value
269 d[key] = value
270 return d
270 return d
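As an illustration of the version-0 helpers above (a sketch, not part of the patch; the node values are made-up 20-byte strings), a marker can be round-tripped in memory:

# sketch: encode one version-0 marker and parse it back
prec, succ = 'a' * 20, 'b' * 20
marker = (prec, (succ,), 0, (('user', 'alice'),), (0.0, 0), None)
raw = _fm0encodeonemarker(marker)
pre, sucs, flags, meta, date, parents = next(_fm0readmarkers(raw, 0))
assert (pre, sucs, meta) == (prec, (succ,), (('user', 'alice'),))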
271
271
272 ## Parsing and writing of version "1"
272 ## Parsing and writing of version "1"
273 #
273 #
274 # The header is followed by the markers. Each marker is made of:
274 # The header is followed by the markers. Each marker is made of:
275 #
275 #
276 # - uint32: total size of the marker (including this field)
276 # - uint32: total size of the marker (including this field)
277 #
277 #
278 # - float64: date in seconds since epoch
278 # - float64: date in seconds since epoch
279 #
279 #
280 # - int16: timezone offset in minutes
280 # - int16: timezone offset in minutes
281 #
281 #
282 # - uint16: a bit field. It is reserved for flags used in common
282 # - uint16: a bit field. It is reserved for flags used in common
283 # obsolete marker operations, to avoid repeated decoding of metadata
283 # obsolete marker operations, to avoid repeated decoding of metadata
284 # entries.
284 # entries.
285 #
285 #
286 # - uint8: number of successors "N", can be zero.
286 # - uint8: number of successors "N", can be zero.
287 #
287 #
288 # - uint8: number of parents "P", can be zero.
288 # - uint8: number of parents "P", can be zero.
289 #
289 #
290 # 0: parents data stored but no parent,
290 # 0: parents data stored but no parent,
291 # 1: one parent stored,
291 # 1: one parent stored,
292 # 2: two parents stored,
292 # 2: two parents stored,
293 # 3: no parent data stored
293 # 3: no parent data stored
294 #
294 #
295 # - uint8: number of metadata entries M
295 # - uint8: number of metadata entries M
296 #
296 #
297 # - 20 or 32 bytes: precursor changeset identifier.
297 # - 20 or 32 bytes: precursor changeset identifier.
298 #
298 #
299 # - N*(20 or 32) bytes: successors changesets identifiers.
299 # - N*(20 or 32) bytes: successors changesets identifiers.
300 #
300 #
301 # - P*(20 or 32) bytes: parents of the precursors changesets.
301 # - P*(20 or 32) bytes: parents of the precursors changesets.
302 #
302 #
303 # - M*(uint8, uint8): size of all metadata entries (key and value)
303 # - M*(uint8, uint8): size of all metadata entries (key and value)
304 #
304 #
305 # - remaining bytes: the metadata, each (key, value) pair after the other.
305 # - remaining bytes: the metadata, each (key, value) pair after the other.
306 _fm1version = 1
306 _fm1version = 1
307 _fm1fixed = '>IdhHBBB20s'
307 _fm1fixed = '>IdhHBBB20s'
308 _fm1nodesha1 = '20s'
308 _fm1nodesha1 = '20s'
309 _fm1nodesha256 = '32s'
309 _fm1nodesha256 = '32s'
310 _fm1nodesha1size = _calcsize(_fm1nodesha1)
310 _fm1nodesha1size = _calcsize(_fm1nodesha1)
311 _fm1nodesha256size = _calcsize(_fm1nodesha256)
311 _fm1nodesha256size = _calcsize(_fm1nodesha256)
312 _fm1fsize = _calcsize(_fm1fixed)
312 _fm1fsize = _calcsize(_fm1fixed)
313 _fm1parentnone = 3
313 _fm1parentnone = 3
314 _fm1parentshift = 14
314 _fm1parentshift = 14
315 _fm1parentmask = (_fm1parentnone << _fm1parentshift)
315 _fm1parentmask = (_fm1parentnone << _fm1parentshift)
316 _fm1metapair = 'BB'
316 _fm1metapair = 'BB'
317 _fm1metapairsize = _calcsize('BB')
317 _fm1metapairsize = _calcsize('BB')
318
318
319 def _fm1purereadmarkers(data, off):
319 def _fm1purereadmarkers(data, off):
320 # make some global constants local for performance
320 # make some global constants local for performance
321 noneflag = _fm1parentnone
321 noneflag = _fm1parentnone
322 sha2flag = usingsha256
322 sha2flag = usingsha256
323 sha1size = _fm1nodesha1size
323 sha1size = _fm1nodesha1size
324 sha2size = _fm1nodesha256size
324 sha2size = _fm1nodesha256size
325 sha1fmt = _fm1nodesha1
325 sha1fmt = _fm1nodesha1
326 sha2fmt = _fm1nodesha256
326 sha2fmt = _fm1nodesha256
327 metasize = _fm1metapairsize
327 metasize = _fm1metapairsize
328 metafmt = _fm1metapair
328 metafmt = _fm1metapair
329 fsize = _fm1fsize
329 fsize = _fm1fsize
330 unpack = _unpack
330 unpack = _unpack
331
331
332 # Loop on markers
332 # Loop on markers
333 stop = len(data) - _fm1fsize
333 stop = len(data) - _fm1fsize
334 ufixed = struct.Struct(_fm1fixed).unpack
334 ufixed = struct.Struct(_fm1fixed).unpack
335
335
336 while off <= stop:
336 while off <= stop:
337 # read fixed part
337 # read fixed part
338 o1 = off + fsize
338 o1 = off + fsize
339 t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1])
339 t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1])
340
340
341 if flags & sha2flag:
341 if flags & sha2flag:
342 # FIXME: prec was read as a SHA1, needs to be amended
342 # FIXME: prec was read as a SHA1, needs to be amended
343
343
344 # read 0 or more successors
344 # read 0 or more successors
345 if numsuc == 1:
345 if numsuc == 1:
346 o2 = o1 + sha2size
346 o2 = o1 + sha2size
347 sucs = (data[o1:o2],)
347 sucs = (data[o1:o2],)
348 else:
348 else:
349 o2 = o1 + sha2size * numsuc
349 o2 = o1 + sha2size * numsuc
350 sucs = unpack(sha2fmt * numsuc, data[o1:o2])
350 sucs = unpack(sha2fmt * numsuc, data[o1:o2])
351
351
352 # read parents
352 # read parents
353 if numpar == noneflag:
353 if numpar == noneflag:
354 o3 = o2
354 o3 = o2
355 parents = None
355 parents = None
356 elif numpar == 1:
356 elif numpar == 1:
357 o3 = o2 + sha2size
357 o3 = o2 + sha2size
358 parents = (data[o2:o3],)
358 parents = (data[o2:o3],)
359 else:
359 else:
360 o3 = o2 + sha2size * numpar
360 o3 = o2 + sha2size * numpar
361 parents = unpack(sha2fmt * numpar, data[o2:o3])
361 parents = unpack(sha2fmt * numpar, data[o2:o3])
362 else:
362 else:
363 # read 0 or more successors
363 # read 0 or more successors
364 if numsuc == 1:
364 if numsuc == 1:
365 o2 = o1 + sha1size
365 o2 = o1 + sha1size
366 sucs = (data[o1:o2],)
366 sucs = (data[o1:o2],)
367 else:
367 else:
368 o2 = o1 + sha1size * numsuc
368 o2 = o1 + sha1size * numsuc
369 sucs = unpack(sha1fmt * numsuc, data[o1:o2])
369 sucs = unpack(sha1fmt * numsuc, data[o1:o2])
370
370
371 # read parents
371 # read parents
372 if numpar == noneflag:
372 if numpar == noneflag:
373 o3 = o2
373 o3 = o2
374 parents = None
374 parents = None
375 elif numpar == 1:
375 elif numpar == 1:
376 o3 = o2 + sha1size
376 o3 = o2 + sha1size
377 parents = (data[o2:o3],)
377 parents = (data[o2:o3],)
378 else:
378 else:
379 o3 = o2 + sha1size * numpar
379 o3 = o2 + sha1size * numpar
380 parents = unpack(sha1fmt * numpar, data[o2:o3])
380 parents = unpack(sha1fmt * numpar, data[o2:o3])
381
381
382 # read metadata
382 # read metadata
383 off = o3 + metasize * nummeta
383 off = o3 + metasize * nummeta
384 metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off])
384 metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off])
385 metadata = []
385 metadata = []
386 for idx in xrange(0, len(metapairsize), 2):
386 for idx in xrange(0, len(metapairsize), 2):
387 o1 = off + metapairsize[idx]
387 o1 = off + metapairsize[idx]
388 o2 = o1 + metapairsize[idx + 1]
388 o2 = o1 + metapairsize[idx + 1]
389 metadata.append((data[off:o1], data[o1:o2]))
389 metadata.append((data[off:o1], data[o1:o2]))
390 off = o2
390 off = o2
391
391
392 yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents)
392 yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents)
393
393
394 def _fm1encodeonemarker(marker):
394 def _fm1encodeonemarker(marker):
395 pre, sucs, flags, metadata, date, parents = marker
395 pre, sucs, flags, metadata, date, parents = marker
396 # determine node size
396 # determine node size
397 _fm1node = _fm1nodesha1
397 _fm1node = _fm1nodesha1
398 if flags & usingsha256:
398 if flags & usingsha256:
399 _fm1node = _fm1nodesha256
399 _fm1node = _fm1nodesha256
400 numsuc = len(sucs)
400 numsuc = len(sucs)
401 numextranodes = numsuc
401 numextranodes = numsuc
402 if parents is None:
402 if parents is None:
403 numpar = _fm1parentnone
403 numpar = _fm1parentnone
404 else:
404 else:
405 numpar = len(parents)
405 numpar = len(parents)
406 numextranodes += numpar
406 numextranodes += numpar
407 formatnodes = _fm1node * numextranodes
407 formatnodes = _fm1node * numextranodes
408 formatmeta = _fm1metapair * len(metadata)
408 formatmeta = _fm1metapair * len(metadata)
409 format = _fm1fixed + formatnodes + formatmeta
409 format = _fm1fixed + formatnodes + formatmeta
410 # tz is stored in minutes so we divide by 60
410 # tz is stored in minutes so we divide by 60
411 tz = date[1]//60
411 tz = date[1]//60
412 data = [None, date[0], tz, flags, numsuc, numpar, len(metadata), pre]
412 data = [None, date[0], tz, flags, numsuc, numpar, len(metadata), pre]
413 data.extend(sucs)
413 data.extend(sucs)
414 if parents is not None:
414 if parents is not None:
415 data.extend(parents)
415 data.extend(parents)
416 totalsize = _calcsize(format)
416 totalsize = _calcsize(format)
417 for key, value in metadata:
417 for key, value in metadata:
418 lk = len(key)
418 lk = len(key)
419 lv = len(value)
419 lv = len(value)
420 data.append(lk)
420 data.append(lk)
421 data.append(lv)
421 data.append(lv)
422 totalsize += lk + lv
422 totalsize += lk + lv
423 data[0] = totalsize
423 data[0] = totalsize
424 data = [_pack(format, *data)]
424 data = [_pack(format, *data)]
425 for key, value in metadata:
425 for key, value in metadata:
426 data.append(key)
426 data.append(key)
427 data.append(value)
427 data.append(value)
428 return ''.join(data)
428 return ''.join(data)
429
429
430 def _fm1readmarkers(data, off):
430 def _fm1readmarkers(data, off):
431 native = getattr(parsers, 'fm1readmarkers', None)
431 native = getattr(parsers, 'fm1readmarkers', None)
432 if not native:
432 if not native:
433 return _fm1purereadmarkers(data, off)
433 return _fm1purereadmarkers(data, off)
434 stop = len(data) - _fm1fsize
434 stop = len(data) - _fm1fsize
435 return native(data, off, stop)
435 return native(data, off, stop)
436
436
437 # mapping to read/write various marker formats
437 # mapping to read/write various marker formats
438 # <version> -> (decoder, encoder)
438 # <version> -> (decoder, encoder)
439 formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
439 formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
440 _fm1version: (_fm1readmarkers, _fm1encodeonemarker)}
440 _fm1version: (_fm1readmarkers, _fm1encodeonemarker)}
441
441
442 @util.nogc
442 @util.nogc
443 def _readmarkers(data):
443 def _readmarkers(data):
444 """Read and enumerate markers from raw data"""
444 """Read and enumerate markers from raw data"""
445 off = 0
445 off = 0
446 diskversion = _unpack('>B', data[off:off + 1])[0]
446 diskversion = _unpack('>B', data[off:off + 1])[0]
447 off += 1
447 off += 1
448 if diskversion not in formats:
448 if diskversion not in formats:
449 msg = _('parsing obsolete marker: unknown version %r') % diskversion
449 msg = _('parsing obsolete marker: unknown version %r') % diskversion
450 raise error.UnknownVersion(msg, version=diskversion)
450 raise error.UnknownVersion(msg, version=diskversion)
451 return diskversion, formats[diskversion][0](data, off)
451 return diskversion, formats[diskversion][0](data, off)
452
452
453 def encodemarkers(markers, addheader=False, version=_fm0version):
453 def encodemarkers(markers, addheader=False, version=_fm0version):
454 # Kept separate from flushmarkers(), it will be reused for
454 # Kept separate from flushmarkers(), it will be reused for
455 # markers exchange.
455 # markers exchange.
456 encodeone = formats[version][1]
456 encodeone = formats[version][1]
457 if addheader:
457 if addheader:
458 yield _pack('>B', version)
458 yield _pack('>B', version)
459 for marker in markers:
459 for marker in markers:
460 yield encodeone(marker)
460 yield encodeone(marker)
461
461
462
462
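# Illustrative sketch (not part of this change): round-tripping a marker
# through the helpers above. The node ids and metadata below are made up.
fakemarker = ('\x11' * 20, ('\x22' * 20,), 0, (('user', 'test'),), (0.0, 0),
              None)
data = ''.join(encodemarkers([fakemarker], addheader=True,
                             version=_fm1version))
version, decoded = _readmarkers(data)
assert version == _fm1version
assert [m[0] for m in decoded] == [fakemarker[0]]  # precursor round-trips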
463 class marker(object):
463 class marker(object):
464 """Wrap obsolete marker raw data"""
464 """Wrap obsolete marker raw data"""
465
465
466 def __init__(self, repo, data):
466 def __init__(self, repo, data):
467 # the repo argument will be used to create changectx in a later version
467 # the repo argument will be used to create changectx in a later version
468 self._repo = repo
468 self._repo = repo
469 self._data = data
469 self._data = data
470 self._decodedmeta = None
470 self._decodedmeta = None
471
471
472 def __hash__(self):
472 def __hash__(self):
473 return hash(self._data)
473 return hash(self._data)
474
474
475 def __eq__(self, other):
475 def __eq__(self, other):
476 if type(other) != type(self):
476 if type(other) != type(self):
477 return False
477 return False
478 return self._data == other._data
478 return self._data == other._data
479
479
480 def precnode(self):
480 def precnode(self):
481 """Precursor changeset node identifier"""
481 """Precursor changeset node identifier"""
482 return self._data[0]
482 return self._data[0]
483
483
484 def succnodes(self):
484 def succnodes(self):
485 """List of successor changesets node identifiers"""
485 """List of successor changesets node identifiers"""
486 return self._data[1]
486 return self._data[1]
487
487
488 def parentnodes(self):
488 def parentnodes(self):
489 """Parents of the precursors (None if not recorded)"""
489 """Parents of the precursors (None if not recorded)"""
490 return self._data[5]
490 return self._data[5]
491
491
492 def metadata(self):
492 def metadata(self):
493 """Decoded metadata dictionary"""
493 """Decoded metadata dictionary"""
494 return dict(self._data[3])
494 return dict(self._data[3])
495
495
496 def date(self):
496 def date(self):
497 """Creation date as (unixtime, offset)"""
497 """Creation date as (unixtime, offset)"""
498 return self._data[4]
498 return self._data[4]
499
499
500 def flags(self):
500 def flags(self):
501 """The flags field of the marker"""
501 """The flags field of the marker"""
502 return self._data[2]
502 return self._data[2]
503
503
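# Illustrative sketch (not part of this change): the wrapper above simply maps
# accessor names onto the raw tuple fields. 'repo' and 'rawdata' (one entry of
# repo.obsstore) are assumed to already exist.
m = marker(repo, rawdata)
assert m.precnode() == rawdata[0]
assert m.succnodes() == rawdata[1]
assert m.flags() == rawdata[2]
assert m.metadata() == dict(rawdata[3])
assert m.date() == rawdata[4] and m.parentnodes() == rawdata[5]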
504 @util.nogc
504 @util.nogc
505 def _addsuccessors(successors, markers):
505 def _addsuccessors(successors, markers):
506 for mark in markers:
506 for mark in markers:
507 successors.setdefault(mark[0], set()).add(mark)
507 successors.setdefault(mark[0], set()).add(mark)
508
508
509 @util.nogc
509 @util.nogc
510 def _addprecursors(precursors, markers):
510 def _addprecursors(precursors, markers):
511 for mark in markers:
511 for mark in markers:
512 for suc in mark[1]:
512 for suc in mark[1]:
513 precursors.setdefault(suc, set()).add(mark)
513 precursors.setdefault(suc, set()).add(mark)
514
514
515 @util.nogc
515 @util.nogc
516 def _addchildren(children, markers):
516 def _addchildren(children, markers):
517 for mark in markers:
517 for mark in markers:
518 parents = mark[5]
518 parents = mark[5]
519 if parents is not None:
519 if parents is not None:
520 for p in parents:
520 for p in parents:
521 children.setdefault(p, set()).add(mark)
521 children.setdefault(p, set()).add(mark)
522
522
523 def _checkinvalidmarkers(markers):
523 def _checkinvalidmarkers(markers):
524 """search for markers with invalid data and raise an error if needed
524 """search for markers with invalid data and raise an error if needed
525
525
526 Exists as a separate function to allow the evolve extension to provide a more
526 Exists as a separate function to allow the evolve extension to provide a more
527 subtle handling.
527 subtle handling.
528 """
528 """
529 for mark in markers:
529 for mark in markers:
530 if node.nullid in mark[1]:
530 if node.nullid in mark[1]:
531 raise error.Abort(_('bad obsolescence marker detected: '
531 raise error.Abort(_('bad obsolescence marker detected: '
532 'invalid successors nullid'))
532 'invalid successors nullid'))
533
533
534 class obsstore(object):
534 class obsstore(object):
535 """Store obsolete markers
535 """Store obsolete markers
536
536
537 Markers can be accessed with the following mappings:
537 Markers can be accessed with the following mappings:
538 - precursors[x] -> set(markers on precursors edges of x)
538 - precursors[x] -> set(markers on precursors edges of x)
539 - successors[x] -> set(markers on successors edges of x)
539 - successors[x] -> set(markers on successors edges of x)
540 - children[x] -> set(markers on precursors edges of children(x))
540 - children[x] -> set(markers on precursors edges of children(x))
541 """
541 """
542
542
543 fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
543 fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
544 # prec: nodeid, precursor changesets
544 # prec: nodeid, precursor changesets
545 # succs: tuple of nodeid, successor changesets (0-N length)
545 # succs: tuple of nodeid, successor changesets (0-N length)
546 # flag: integer, flag field carrying modifier for the markers (see doc)
546 # flag: integer, flag field carrying modifier for the markers (see doc)
547 # meta: binary blob, encoded metadata dictionary
547 # meta: binary blob, encoded metadata dictionary
548 # date: (float, int) tuple, date of marker creation
548 # date: (float, int) tuple, date of marker creation
549 # parents: (tuple of nodeid) or None, parents of precursors
549 # parents: (tuple of nodeid) or None, parents of precursors
550 # None is used when no data has been recorded
550 # None is used when no data has been recorded
551
551
552 def __init__(self, svfs, defaultformat=_fm1version, readonly=False):
552 def __init__(self, svfs, defaultformat=_fm1version, readonly=False):
553 # caches for various obsolescence related data
553 # caches for various obsolescence related data
554 self.caches = {}
554 self.caches = {}
555 self.svfs = svfs
555 self.svfs = svfs
556 self._version = defaultformat
556 self._version = defaultformat
557 self._readonly = readonly
557 self._readonly = readonly
558
558
559 def __iter__(self):
559 def __iter__(self):
560 return iter(self._all)
560 return iter(self._all)
561
561
562 def __len__(self):
562 def __len__(self):
563 return len(self._all)
563 return len(self._all)
564
564
565 def __nonzero__(self):
565 def __nonzero__(self):
566 if not self._cached('_all'):
566 if not self._cached('_all'):
567 try:
567 try:
568 return self.svfs.stat('obsstore').st_size > 1
568 return self.svfs.stat('obsstore').st_size > 1
569 except OSError as inst:
569 except OSError as inst:
570 if inst.errno != errno.ENOENT:
570 if inst.errno != errno.ENOENT:
571 raise
571 raise
572 # just build an empty _all list if no obsstore exists, which
572 # just build an empty _all list if no obsstore exists, which
573 # avoids further stat() syscalls
573 # avoids further stat() syscalls
574 pass
574 pass
575 return bool(self._all)
575 return bool(self._all)
576
576
577 __bool__ = __nonzero__
577 __bool__ = __nonzero__
578
578
579 @property
579 @property
580 def readonly(self):
580 def readonly(self):
581 """True if marker creation is disabled
581 """True if marker creation is disabled
582
582
583 Remove me in the future when obsolete markers are always on."""
583 Remove me in the future when obsolete markers are always on."""
584 return self._readonly
584 return self._readonly
585
585
586 def create(self, transaction, prec, succs=(), flag=0, parents=None,
586 def create(self, transaction, prec, succs=(), flag=0, parents=None,
587 date=None, metadata=None, ui=None):
587 date=None, metadata=None, ui=None):
588 """obsolete: add a new obsolete marker
588 """obsolete: add a new obsolete marker
589
589
590 * ensure it is hashable
590 * ensure it is hashable
591 * check mandatory metadata
591 * check mandatory metadata
592 * encode metadata
592 * encode metadata
593
593
594 If you are a human writing code that creates markers, you want to use the
594 If you are a human writing code that creates markers, you want to use the
595 `createmarkers` function in this module instead.
595 `createmarkers` function in this module instead.
596
596
597 Return True if a new marker has been added, False if the marker
597 Return True if a new marker has been added, False if the marker
598 already existed (no-op).
598 already existed (no-op).
599 """
599 """
600 if metadata is None:
600 if metadata is None:
601 metadata = {}
601 metadata = {}
602 if date is None:
602 if date is None:
603 if 'date' in metadata:
603 if 'date' in metadata:
604 # as a courtesy for out-of-tree extensions
604 # as a courtesy for out-of-tree extensions
605 date = util.parsedate(metadata.pop('date'))
605 date = util.parsedate(metadata.pop('date'))
606 elif ui is not None:
606 elif ui is not None:
607 date = ui.configdate('devel', 'default-date')
607 date = ui.configdate('devel', 'default-date')
608 if date is None:
608 if date is None:
609 date = util.makedate()
609 date = util.makedate()
610 else:
610 else:
611 date = util.makedate()
611 date = util.makedate()
612 if len(prec) != 20:
612 if len(prec) != 20:
613 raise ValueError(prec)
613 raise ValueError(prec)
614 for succ in succs:
614 for succ in succs:
615 if len(succ) != 20:
615 if len(succ) != 20:
616 raise ValueError(succ)
616 raise ValueError(succ)
617 if prec in succs:
617 if prec in succs:
618 raise ValueError(_('in-marker cycle with %s') % node.hex(prec))
618 raise ValueError(_('in-marker cycle with %s') % node.hex(prec))
619
619
620 metadata = tuple(sorted(metadata.iteritems()))
620 metadata = tuple(sorted(metadata.iteritems()))
621
621
622 marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
622 marker = (str(prec), tuple(succs), int(flag), metadata, date, parents)
623 return bool(self.add(transaction, [marker]))
623 return bool(self.add(transaction, [marker]))
624
624
625 def add(self, transaction, markers):
625 def add(self, transaction, markers):
626 """Add new markers to the store
626 """Add new markers to the store
627
627
628 Take care of filtering duplicates.
628 Take care of filtering duplicates.
629 Return the number of new markers."""
629 Return the number of new markers."""
630 if self._readonly:
630 if self._readonly:
631 raise error.Abort(_('creating obsolete markers is not enabled on '
631 raise error.Abort(_('creating obsolete markers is not enabled on '
632 'this repo'))
632 'this repo'))
633 known = set(self._all)
633 known = set(self._all)
634 new = []
634 new = []
635 for m in markers:
635 for m in markers:
636 if m not in known:
636 if m not in known:
637 known.add(m)
637 known.add(m)
638 new.append(m)
638 new.append(m)
639 if new:
639 if new:
640 f = self.svfs('obsstore', 'ab')
640 f = self.svfs('obsstore', 'ab')
641 try:
641 try:
642 offset = f.tell()
642 offset = f.tell()
643 transaction.add('obsstore', offset)
643 transaction.add('obsstore', offset)
644 # offset == 0: new file - add the version header
644 # offset == 0: new file - add the version header
645 for bytes in encodemarkers(new, offset == 0, self._version):
645 for bytes in encodemarkers(new, offset == 0, self._version):
646 f.write(bytes)
646 f.write(bytes)
647 finally:
647 finally:
648 # XXX: f.close() == filecache invalidation == obsstore rebuilt.
648 # XXX: f.close() == filecache invalidation == obsstore rebuilt.
649 # call 'filecacheentry.refresh()' here
649 # call 'filecacheentry.refresh()' here
650 f.close()
650 f.close()
651 self._addmarkers(new)
651 self._addmarkers(new)
652 # new markers *may* have changed several sets. invalidate the cache.
652 # new markers *may* have changed several sets. invalidate the cache.
653 self.caches.clear()
653 self.caches.clear()
654 # records the number of new markers for the transaction hooks
654 # records the number of new markers for the transaction hooks
655 previous = int(transaction.hookargs.get('new_obsmarkers', '0'))
655 previous = int(transaction.hookargs.get('new_obsmarkers', '0'))
656 transaction.hookargs['new_obsmarkers'] = str(previous + len(new))
656 transaction.hookargs['new_obsmarkers'] = str(previous + len(new))
657 return len(new)
657 return len(new)
658
658
659 def mergemarkers(self, transaction, data):
659 def mergemarkers(self, transaction, data):
660 """merge a binary stream of markers inside the obsstore
660 """merge a binary stream of markers inside the obsstore
661
661
662 Returns the number of new markers added."""
662 Returns the number of new markers added."""
663 version, markers = _readmarkers(data)
663 version, markers = _readmarkers(data)
664 return self.add(transaction, markers)
664 return self.add(transaction, markers)
665
665
666 @propertycache
666 @propertycache
667 def _all(self):
667 def _all(self):
668 data = self.svfs.tryread('obsstore')
668 data = self.svfs.tryread('obsstore')
669 if not data:
669 if not data:
670 return []
670 return []
671 self._version, markers = _readmarkers(data)
671 self._version, markers = _readmarkers(data)
672 markers = list(markers)
672 markers = list(markers)
673 _checkinvalidmarkers(markers)
673 _checkinvalidmarkers(markers)
674 return markers
674 return markers
675
675
676 @propertycache
676 @propertycache
677 def successors(self):
677 def successors(self):
678 successors = {}
678 successors = {}
679 _addsuccessors(successors, self._all)
679 _addsuccessors(successors, self._all)
680 return successors
680 return successors
681
681
682 @propertycache
682 @propertycache
683 def precursors(self):
683 def precursors(self):
684 precursors = {}
684 precursors = {}
685 _addprecursors(precursors, self._all)
685 _addprecursors(precursors, self._all)
686 return precursors
686 return precursors
687
687
688 @propertycache
688 @propertycache
689 def children(self):
689 def children(self):
690 children = {}
690 children = {}
691 _addchildren(children, self._all)
691 _addchildren(children, self._all)
692 return children
692 return children
693
693
694 def _cached(self, attr):
694 def _cached(self, attr):
695 return attr in self.__dict__
695 return attr in self.__dict__
696
696
697 def _addmarkers(self, markers):
697 def _addmarkers(self, markers):
698 markers = list(markers) # to allow repeated iteration
698 markers = list(markers) # to allow repeated iteration
699 self._all.extend(markers)
699 self._all.extend(markers)
700 if self._cached('successors'):
700 if self._cached('successors'):
701 _addsuccessors(self.successors, markers)
701 _addsuccessors(self.successors, markers)
702 if self._cached('precursors'):
702 if self._cached('precursors'):
703 _addprecursors(self.precursors, markers)
703 _addprecursors(self.precursors, markers)
704 if self._cached('children'):
704 if self._cached('children'):
705 _addchildren(self.children, markers)
705 _addchildren(self.children, markers)
706 _checkinvalidmarkers(markers)
706 _checkinvalidmarkers(markers)
707
707
708 def relevantmarkers(self, nodes):
708 def relevantmarkers(self, nodes):
709 """return a set of all obsolescence markers relevant to a set of nodes.
709 """return a set of all obsolescence markers relevant to a set of nodes.
710
710
711 "relevant" to a set of nodes means:
711 "relevant" to a set of nodes means:
712
712
713 - markers that use this changeset as a successor
713 - markers that use this changeset as a successor
714 - prune markers of direct children of this changeset
714 - prune markers of direct children of this changeset
715 - recursive application of the two rules on precursors of these markers
715 - recursive application of the two rules on precursors of these markers
716
716
717 The result is a set, so you cannot rely on ordering."""
717 The result is a set, so you cannot rely on ordering."""
718
718
719 pendingnodes = set(nodes)
719 pendingnodes = set(nodes)
720 seenmarkers = set()
720 seenmarkers = set()
721 seennodes = set(pendingnodes)
721 seennodes = set(pendingnodes)
722 precursorsmarkers = self.precursors
722 precursorsmarkers = self.precursors
723 succsmarkers = self.successors
723 succsmarkers = self.successors
724 children = self.children
724 children = self.children
725 while pendingnodes:
725 while pendingnodes:
726 direct = set()
726 direct = set()
727 for current in pendingnodes:
727 for current in pendingnodes:
728 direct.update(precursorsmarkers.get(current, ()))
728 direct.update(precursorsmarkers.get(current, ()))
729 pruned = [m for m in children.get(current, ()) if not m[1]]
729 pruned = [m for m in children.get(current, ()) if not m[1]]
730 direct.update(pruned)
730 direct.update(pruned)
731 pruned = [m for m in succsmarkers.get(current, ()) if not m[1]]
731 pruned = [m for m in succsmarkers.get(current, ()) if not m[1]]
732 direct.update(pruned)
732 direct.update(pruned)
733 direct -= seenmarkers
733 direct -= seenmarkers
734 pendingnodes = set([m[0] for m in direct])
734 pendingnodes = set([m[0] for m in direct])
735 seenmarkers |= direct
735 seenmarkers |= direct
736 pendingnodes -= seennodes
736 pendingnodes -= seennodes
737 seennodes |= pendingnodes
737 seennodes |= pendingnodes
738 return seenmarkers
738 return seenmarkers
739
739
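# Illustrative sketch (not part of this change): recording one marker by hand
# with obsstore.create(); real callers should normally go through
# createmarkers() at the end of this module. 'repo', 'oldnode' and 'newnode'
# are hypothetical, and the transaction name is made up.
lock = repo.lock()
try:
    tr = repo.transaction('add-obsolescence-marker')
    try:
        repo.obsstore.create(tr, oldnode, succs=(newnode,),
                             metadata={'user': 'alice'})
        tr.close()
    finally:
        tr.release()
finally:
    lock.release()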
740 def _filterprunes(markers):
741 """return a set with no prune markers"""
742 return set(m for m in markers if m[1])
743
744 def exclusivemarkers(repo, nodes):
745 """set of markers relevant to "nodes" but no other locally-known nodes
746
747 This function computes the set of markers "exclusive" to a locally-known
748 node. This means we walk the markers starting from <nodes> until we reach a
749 locally-known precursor outside of <nodes>. Elements of <nodes> with
750 locally-known successors outside of <nodes> are ignored (since their
751 precursor markers are also relevant to these successors).
752
753 For example:
754
755 # (A0 rewritten as A1)
756 #
757 # A0 <-1- A1 # Marker "1" is exclusive to A1
758
759 or
760
761 # (A0 rewritten as AX; AX rewritten as A1; AX is unknown locally)
762 #
763 # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
764
765 or
766
767 # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
768 #
769 # <-2- A1 # Marker "2" is exclusive to A0,A1
770 # /
771 # <-1- A0
772 # \
773 # <-3- A2 # Marker "3" is exclusive to A0,A2
774 #
775 # in addition:
776 #
777 # Markers "2,3" are exclusive to A1,A2
778 # Markers "1,2,3" are exclusive to A0,A1,A2
779
780 An example usage is strip. When stripping a changeset, we also want to
781 strip the markers exclusive to this changeset. Otherwise we would have
782 "dangling" obsolescence markers from its precursors: obsolescence markers
783 marking a node as obsolete without any successors available locally.
784
785 As with relevant markers, the prune markers for children will be followed.
786 Of course, they will only be followed if the pruned children are
787 locally-known, since the prune markers are relevant to the pruned nodes.
788 However, while prune markers are considered relevant to the parent of the
789 pruned changesets, prune markers for a locally-known changeset (with no
790 successors) are considered exclusive to the pruned nodes. This allows
791 stripping the prune markers (with the rest of the exclusive chain) alongside
792 the pruned changesets.
793 """
794 # running on a filtered repository would be dangerous as markers could be
795 # reported as exclusive when they are relevant for other filtered nodes.
796 unfi = repo.unfiltered()
797
798 # shortcuts to various useful items
799 nm = unfi.changelog.nodemap
800 precursorsmarkers = unfi.obsstore.precursors
801 successormarkers = unfi.obsstore.successors
802 childrenmarkers = unfi.obsstore.children
803
804 # exclusive markers (return of the function)
805 exclmarkers = set()
806 # we need fast membership testing
807 nodes = set(nodes)
808 # looking for head in the obshistory
809 #
810 # XXX we are ignoring all issues in regard to cycles for now.
811 stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
812 stack.sort()
813 # nodes already stacked
814 seennodes = set(stack)
815 while stack:
816 current = stack.pop()
817 # fetch precursors markers
818 markers = list(precursorsmarkers.get(current, ()))
819 # extend the list with prune markers
820 for mark in successormarkers.get(current, ()):
821 if not mark[1]:
822 markers.append(mark)
823 # and markers from children (looking for prune)
824 for mark in childrenmarkers.get(current, ()):
825 if not mark[1]:
826 markers.append(mark)
827 # traverse the markers
828 for mark in markers:
829 if mark in exclmarkers:
830 # markers already selected
831 continue
832
833 # If the marker is about the current node, select it
834 #
835 # (this delays the addition of markers from children)
836 if mark[1] or mark[0] == current:
837 exclmarkers.add(mark)
838
839 # should we keep traversing through the precursors?
840 prec = mark[0]
841
842 # nodes in the stack or already processed
843 if prec in seennodes:
844 continue
845
846 # is this a locally known node?
847 known = prec in nm
848 # if locally-known and not in the <nodes> set, the traversal
849 # stops here.
850 if known and prec not in nodes:
851 continue
852
853 # do not keep going if there are unselected markers pointing to this
854 # node. If we end up traversing these unselected markers later the
855 # node will be taken care of at that point.
856 precmarkers = _filterprunes(successormarkers.get(prec))
857 if precmarkers.issubset(exclmarkers):
858 seennodes.add(prec)
859 stack.append(prec)
860
861 return exclmarkers
862
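# Illustrative sketch (not part of this change), matching the first example in
# the docstring above: a single marker "1" rewrites A0 into A1 and both nodes
# are known locally. 'repo', 'A0' and 'A1' are hypothetical node ids.
assert exclusivemarkers(repo, [A0]) == set()        # A1 is a known successor
assert exclusivemarkers(repo, [A1]) == exclusivemarkers(repo, [A0, A1])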
740 def commonversion(versions):
863 def commonversion(versions):
741 """Return the newest version listed in both versions and our local formats.
864 """Return the newest version listed in both versions and our local formats.
742
865
743 Returns None if no common version exists.
866 Returns None if no common version exists.
744 """
867 """
745 versions.sort(reverse=True)
868 versions.sort(reverse=True)
746 # search for the highest version known on both sides
869 # search for the highest version known on both sides
747 for v in versions:
870 for v in versions:
748 if v in formats:
871 if v in formats:
749 return v
872 return v
750 return None
873 return None
751
874
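# Illustrative sketch (not part of this change): picking the marker format to
# use when exchanging with a peer. Note that commonversion() sorts its
# argument in place, so pass a copy if the original order matters.
assert commonversion([_fm0version, _fm1version]) == _fm1version
assert commonversion([42]) is None    # nothing in common with our formats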
752 # arbitrarily picked to fit into the 8K limit from HTTP servers
875 # arbitrarily picked to fit into the 8K limit from HTTP servers
753 # you have to take into account:
876 # you have to take into account:
754 # - the version header
877 # - the version header
755 # - the base85 encoding
878 # - the base85 encoding
756 _maxpayload = 5300
879 _maxpayload = 5300
757
880
758 def _pushkeyescape(markers):
881 def _pushkeyescape(markers):
759 """encode markers into a dict suitable for pushkey exchange
882 """encode markers into a dict suitable for pushkey exchange
760
883
761 - binary data is base85 encoded
884 - binary data is base85 encoded
762 - split in chunks smaller than 5300 bytes"""
885 - split in chunks smaller than 5300 bytes"""
763 keys = {}
886 keys = {}
764 parts = []
887 parts = []
765 currentlen = _maxpayload * 2 # ensure we create a new part
888 currentlen = _maxpayload * 2 # ensure we create a new part
766 for marker in markers:
889 for marker in markers:
767 nextdata = _fm0encodeonemarker(marker)
890 nextdata = _fm0encodeonemarker(marker)
768 if (len(nextdata) + currentlen > _maxpayload):
891 if (len(nextdata) + currentlen > _maxpayload):
769 currentpart = []
892 currentpart = []
770 currentlen = 0
893 currentlen = 0
771 parts.append(currentpart)
894 parts.append(currentpart)
772 currentpart.append(nextdata)
895 currentpart.append(nextdata)
773 currentlen += len(nextdata)
896 currentlen += len(nextdata)
774 for idx, part in enumerate(reversed(parts)):
897 for idx, part in enumerate(reversed(parts)):
775 data = ''.join([_pack('>B', _fm0version)] + part)
898 data = ''.join([_pack('>B', _fm0version)] + part)
776 keys['dump%i' % idx] = util.b85encode(data)
899 keys['dump%i' % idx] = util.b85encode(data)
777 return keys
900 return keys
778
901
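# Illustrative sketch (not part of this change): every pushkey value produced
# above decodes back into a version-prefixed chunk of fm0 markers. 'markers'
# is hypothetical.
keys = _pushkeyescape(markers)
for key, value in sorted(keys.items()):
    version, chunk = _readmarkers(util.b85decode(value))
    assert version == _fm0version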
779 def listmarkers(repo):
902 def listmarkers(repo):
780 """List markers over pushkey"""
903 """List markers over pushkey"""
781 if not repo.obsstore:
904 if not repo.obsstore:
782 return {}
905 return {}
783 return _pushkeyescape(sorted(repo.obsstore))
906 return _pushkeyescape(sorted(repo.obsstore))
784
907
785 def pushmarker(repo, key, old, new):
908 def pushmarker(repo, key, old, new):
786 """Push markers over pushkey"""
909 """Push markers over pushkey"""
787 if not key.startswith('dump'):
910 if not key.startswith('dump'):
788 repo.ui.warn(_('unknown key: %r') % key)
911 repo.ui.warn(_('unknown key: %r') % key)
789 return 0
912 return 0
790 if old:
913 if old:
791 repo.ui.warn(_('unexpected old value for %r') % key)
914 repo.ui.warn(_('unexpected old value for %r') % key)
792 return 0
915 return 0
793 data = util.b85decode(new)
916 data = util.b85decode(new)
794 lock = repo.lock()
917 lock = repo.lock()
795 try:
918 try:
796 tr = repo.transaction('pushkey: obsolete markers')
919 tr = repo.transaction('pushkey: obsolete markers')
797 try:
920 try:
798 repo.obsstore.mergemarkers(tr, data)
921 repo.obsstore.mergemarkers(tr, data)
799 repo.invalidatevolatilesets()
922 repo.invalidatevolatilesets()
800 tr.close()
923 tr.close()
801 return 1
924 return 1
802 finally:
925 finally:
803 tr.release()
926 tr.release()
804 finally:
927 finally:
805 lock.release()
928 lock.release()
806
929
807 def getmarkers(repo, nodes=None):
930 def getmarkers(repo, nodes=None, exclusive=False):
808 """returns markers known in a repository
931 """returns markers known in a repository
809
932
810 If <nodes> is specified, only markers "relevant" to those nodes are
933 If <nodes> is specified, only markers "relevant" to those nodes are
811 returned."""
934 returned."""
812 if nodes is None:
935 if nodes is None:
813 rawmarkers = repo.obsstore
936 rawmarkers = repo.obsstore
937 elif exclusive:
938 rawmarkers = exclusivemarkers(repo, nodes)
814 else:
939 else:
815 rawmarkers = repo.obsstore.relevantmarkers(nodes)
940 rawmarkers = repo.obsstore.relevantmarkers(nodes)
816
941
817 for markerdata in rawmarkers:
942 for markerdata in rawmarkers:
818 yield marker(repo, markerdata)
943 yield marker(repo, markerdata)
819
944
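# Illustrative sketch (not part of this change): the three ways of listing
# markers with getmarkers(). 'repo' and 'nodes' are hypothetical.
allmarkers = list(getmarkers(repo))                        # whole obsstore
relevant = list(getmarkers(repo, nodes))                   # relevantmarkers()
exclusive = list(getmarkers(repo, nodes, exclusive=True))  # exclusivemarkers()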
820 def relevantmarkers(repo, node):
945 def relevantmarkers(repo, node):
821 """all obsolete markers relevant to some revision"""
946 """all obsolete markers relevant to some revision"""
822 for markerdata in repo.obsstore.relevantmarkers(node):
947 for markerdata in repo.obsstore.relevantmarkers(node):
823 yield marker(repo, markerdata)
948 yield marker(repo, markerdata)
824
949
825
950
826 def precursormarkers(ctx):
951 def precursormarkers(ctx):
827 """obsolete markers marking this changeset as a successor"""
952 """obsolete markers marking this changeset as a successor"""
828 for data in ctx.repo().obsstore.precursors.get(ctx.node(), ()):
953 for data in ctx.repo().obsstore.precursors.get(ctx.node(), ()):
829 yield marker(ctx.repo(), data)
954 yield marker(ctx.repo(), data)
830
955
831 def successormarkers(ctx):
956 def successormarkers(ctx):
832 """obsolete markers making this changeset obsolete"""
957 """obsolete markers making this changeset obsolete"""
833 for data in ctx.repo().obsstore.successors.get(ctx.node(), ()):
958 for data in ctx.repo().obsstore.successors.get(ctx.node(), ()):
834 yield marker(ctx.repo(), data)
959 yield marker(ctx.repo(), data)
835
960
836 def allsuccessors(obsstore, nodes, ignoreflags=0):
961 def allsuccessors(obsstore, nodes, ignoreflags=0):
837 """Yield node for every successor of <nodes>.
962 """Yield node for every successor of <nodes>.
838
963
839 Some successors may be unknown locally.
964 Some successors may be unknown locally.
840
965
841 This is a linear yield unsuited to detecting split changesets. It includes
966 This is a linear yield unsuited to detecting split changesets. It includes
842 initial nodes too."""
967 initial nodes too."""
843 remaining = set(nodes)
968 remaining = set(nodes)
844 seen = set(remaining)
969 seen = set(remaining)
845 while remaining:
970 while remaining:
846 current = remaining.pop()
971 current = remaining.pop()
847 yield current
972 yield current
848 for mark in obsstore.successors.get(current, ()):
973 for mark in obsstore.successors.get(current, ()):
849 # ignore marker flagged with specified flag
974 # ignore marker flagged with specified flag
850 if mark[2] & ignoreflags:
975 if mark[2] & ignoreflags:
851 continue
976 continue
852 for suc in mark[1]:
977 for suc in mark[1]:
853 if suc not in seen:
978 if suc not in seen:
854 seen.add(suc)
979 seen.add(suc)
855 remaining.add(suc)
980 remaining.add(suc)
856
981
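# Illustrative sketch (not part of this change): walking the successor graph
# from one node; the initial node is always yielded too. 'repo' and 'n' are
# hypothetical.
succs = set(allsuccessors(repo.obsstore, [n]))
assert n in succs
nobump = set(allsuccessors(repo.obsstore, [n], ignoreflags=bumpedfix))
assert nobump.issubset(succs)   # ignoring markers can only prune the walk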
857 def allprecursors(obsstore, nodes, ignoreflags=0):
982 def allprecursors(obsstore, nodes, ignoreflags=0):
858 """Yield node for every precursor of <nodes>.
983 """Yield node for every precursor of <nodes>.
859
984
860 Some precursors may be unknown locally.
985 Some precursors may be unknown locally.
861
986
862 This is a linear yield unsuited to detecting folded changesets. It includes
987 This is a linear yield unsuited to detecting folded changesets. It includes
863 initial nodes too."""
988 initial nodes too."""
864
989
865 remaining = set(nodes)
990 remaining = set(nodes)
866 seen = set(remaining)
991 seen = set(remaining)
867 while remaining:
992 while remaining:
868 current = remaining.pop()
993 current = remaining.pop()
869 yield current
994 yield current
870 for mark in obsstore.precursors.get(current, ()):
995 for mark in obsstore.precursors.get(current, ()):
871 # ignore marker flagged with specified flag
996 # ignore marker flagged with specified flag
872 if mark[2] & ignoreflags:
997 if mark[2] & ignoreflags:
873 continue
998 continue
874 suc = mark[0]
999 suc = mark[0]
875 if suc not in seen:
1000 if suc not in seen:
876 seen.add(suc)
1001 seen.add(suc)
877 remaining.add(suc)
1002 remaining.add(suc)
878
1003
879 def foreground(repo, nodes):
1004 def foreground(repo, nodes):
880 """return all nodes in the "foreground" of other nodes
1005 """return all nodes in the "foreground" of other nodes
881
1006
882 The foreground of a revision is anything reachable using parent -> children
1007 The foreground of a revision is anything reachable using parent -> children
883 or precursor -> successor relation. It is very similar to "descendant" but
1008 or precursor -> successor relation. It is very similar to "descendant" but
884 augmented with obsolescence information.
1009 augmented with obsolescence information.
885
1010
886 Beware that obsolescence cycles may appear in complex situations.
1011 Beware that obsolescence cycles may appear in complex situations.
887 """
1012 """
888 repo = repo.unfiltered()
1013 repo = repo.unfiltered()
889 foreground = set(repo.set('%ln::', nodes))
1014 foreground = set(repo.set('%ln::', nodes))
890 if repo.obsstore:
1015 if repo.obsstore:
891 # We only need this complicated logic if there is obsolescence
1016 # We only need this complicated logic if there is obsolescence
892 # XXX will probably deserve an optimised revset.
1017 # XXX will probably deserve an optimised revset.
893 nm = repo.changelog.nodemap
1018 nm = repo.changelog.nodemap
894 plen = -1
1019 plen = -1
895 # compute the whole set of successors or descendants
1020 # compute the whole set of successors or descendants
896 while len(foreground) != plen:
1021 while len(foreground) != plen:
897 plen = len(foreground)
1022 plen = len(foreground)
898 succs = set(c.node() for c in foreground)
1023 succs = set(c.node() for c in foreground)
899 mutable = [c.node() for c in foreground if c.mutable()]
1024 mutable = [c.node() for c in foreground if c.mutable()]
900 succs.update(allsuccessors(repo.obsstore, mutable))
1025 succs.update(allsuccessors(repo.obsstore, mutable))
901 known = (n for n in succs if n in nm)
1026 known = (n for n in succs if n in nm)
902 foreground = set(repo.set('%ln::', known))
1027 foreground = set(repo.set('%ln::', known))
903 return set(c.node() for c in foreground)
1028 return set(c.node() for c in foreground)
904
1029
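# Illustrative sketch (not part of this change): the foreground of a node is a
# superset of its descendants. 'repo' and 'n' (a locally known node) are
# hypothetical.
fg = foreground(repo, [n])
assert n in fg
assert set(c.node() for c in repo.set('%ln::', [n])).issubset(fg)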
905
1030
906 def successorssets(repo, initialnode, cache=None):
1031 def successorssets(repo, initialnode, cache=None):
907 """Return set of all latest successors of initial nodes
1032 """Return set of all latest successors of initial nodes
908
1033
909 The successors set of a changeset A are the group of revisions that succeed
1034 The successors set of a changeset A are the group of revisions that succeed
910 A. It succeeds A as a consistent whole, each revision being only a partial
1035 A. It succeeds A as a consistent whole, each revision being only a partial
911 replacement. The successors set contains non-obsolete changesets only.
1036 replacement. The successors set contains non-obsolete changesets only.
912
1037
913 This function returns the full list of successor sets which is why it
1038 This function returns the full list of successor sets which is why it
914 returns a list of tuples and not just a single tuple. Each tuple is a valid
1039 returns a list of tuples and not just a single tuple. Each tuple is a valid
915 successors set. Note that (A,) may be a valid successors set for changeset A
1040 successors set. Note that (A,) may be a valid successors set for changeset A
916 (see below).
1041 (see below).
917
1042
918 In most cases, a changeset A will have a single element (e.g. the changeset
1043 In most cases, a changeset A will have a single element (e.g. the changeset
919 A is replaced by A') in its successors set. Though, it is also common for a
1044 A is replaced by A') in its successors set. Though, it is also common for a
920 changeset A to have no elements in its successor set (e.g. the changeset
1045 changeset A to have no elements in its successor set (e.g. the changeset
921 has been pruned). Therefore, the returned list of successors sets will be
1046 has been pruned). Therefore, the returned list of successors sets will be
922 [(A',)] or [], respectively.
1047 [(A',)] or [], respectively.
923
1048
924 When a changeset A is split into A' and B', however, it will result in a
1049 When a changeset A is split into A' and B', however, it will result in a
925 successors set containing more than a single element, i.e. [(A',B')].
1050 successors set containing more than a single element, i.e. [(A',B')].
926 Divergent changesets will result in multiple successors sets, i.e. [(A',),
1051 Divergent changesets will result in multiple successors sets, i.e. [(A',),
927 (A'')].
1052 (A'')].
928
1053
929 If a changeset A is not obsolete, then it will conceptually have no
1054 If a changeset A is not obsolete, then it will conceptually have no
930 successors set. To distinguish this from a pruned changeset, the successor
1055 successors set. To distinguish this from a pruned changeset, the successor
931 set will contain itself only, i.e. [(A,)].
1056 set will contain itself only, i.e. [(A,)].
932
1057
933 Finally, successors unknown locally are considered to be pruned (obsoleted
1058 Finally, successors unknown locally are considered to be pruned (obsoleted
934 without any successors).
1059 without any successors).
935
1060
936 The optional `cache` parameter is a dictionary that may contain precomputed
1061 The optional `cache` parameter is a dictionary that may contain precomputed
937 successors sets. It is meant to reuse the computation of a previous call to
1062 successors sets. It is meant to reuse the computation of a previous call to
938 `successorssets` when multiple calls are made at the same time. The cache
1063 `successorssets` when multiple calls are made at the same time. The cache
939 dictionary is updated in place. The caller is responsible for its life
1064 dictionary is updated in place. The caller is responsible for its life
940 span. Code that makes multiple calls to `successorssets` *must* use this
1065 span. Code that makes multiple calls to `successorssets` *must* use this
941 cache mechanism or suffer terrible performance.
1066 cache mechanism or suffer terrible performance.
942 """
1067 """
943
1068
944 succmarkers = repo.obsstore.successors
1069 succmarkers = repo.obsstore.successors
945
1070
946 # Stack of nodes we search successors sets for
1071 # Stack of nodes we search successors sets for
947 toproceed = [initialnode]
1072 toproceed = [initialnode]
948 # set version of above list for fast loop detection
1073 # set version of above list for fast loop detection
949 # element added to "toproceed" must be added here
1074 # element added to "toproceed" must be added here
950 stackedset = set(toproceed)
1075 stackedset = set(toproceed)
951 if cache is None:
1076 if cache is None:
952 cache = {}
1077 cache = {}
953
1078
954 # This while loop is the flattened version of a recursive search for
1079 # This while loop is the flattened version of a recursive search for
955 # successors sets
1080 # successors sets
956 #
1081 #
957 # def successorssets(x):
1082 # def successorssets(x):
958 # successors = directsuccessors(x)
1083 # successors = directsuccessors(x)
959 # ss = [[]]
1084 # ss = [[]]
960 # for succ in directsuccessors(x):
1085 # for succ in directsuccessors(x):
961 # # product as in itertools cartesian product
1086 # # product as in itertools cartesian product
962 # ss = product(ss, successorssets(succ))
1087 # ss = product(ss, successorssets(succ))
963 # return ss
1088 # return ss
964 #
1089 #
965 # But we can not use plain recursive calls here:
1090 # But we can not use plain recursive calls here:
966 # - that would blow the python call stack
1091 # - that would blow the python call stack
967 # - obsolescence markers may have cycles, we need to handle them.
1092 # - obsolescence markers may have cycles, we need to handle them.
968 #
1093 #
969 # The `toproceed` list acts as our call stack. Every node we search
1094 # The `toproceed` list acts as our call stack. Every node we search
970 # successors sets for is stacked there.
1095 # successors sets for is stacked there.
971 #
1096 #
972 # The `stackedset` is a set version of this stack used to check if a node is
1097 # The `stackedset` is a set version of this stack used to check if a node is
973 # already stacked. This check is used to detect cycles and prevent infinite
1098 # already stacked. This check is used to detect cycles and prevent infinite
974 # loops.
1099 # loops.
975 #
1100 #
976 # successors set of all nodes are stored in the `cache` dictionary.
1101 # successors set of all nodes are stored in the `cache` dictionary.
977 #
1102 #
978 # After this while loop ends we use the cache to return the successors sets
1103 # After this while loop ends we use the cache to return the successors sets
979 # for the node requested by the caller.
1104 # for the node requested by the caller.
980 while toproceed:
1105 while toproceed:
981 # Every iteration tries to compute the successors sets of the topmost
1106 # Every iteration tries to compute the successors sets of the topmost
982 # node of the stack: CURRENT.
1107 # node of the stack: CURRENT.
983 #
1108 #
984 # There are four possible outcomes:
1109 # There are four possible outcomes:
985 #
1110 #
986 # 1) We already know the successors sets of CURRENT:
1111 # 1) We already know the successors sets of CURRENT:
987 # -> mission accomplished, pop it from the stack.
1112 # -> mission accomplished, pop it from the stack.
988 # 2) Node is not obsolete:
1113 # 2) Node is not obsolete:
989 # -> the node is its own successors sets. Add it to the cache.
1114 # -> the node is its own successors sets. Add it to the cache.
990 # 3) We do not know successors set of direct successors of CURRENT:
1115 # 3) We do not know successors set of direct successors of CURRENT:
991 # -> We add those successors to the stack.
1116 # -> We add those successors to the stack.
992 # 4) We know successors sets of all direct successors of CURRENT:
1117 # 4) We know successors sets of all direct successors of CURRENT:
993 # -> We can compute CURRENT successors set and add it to the
1118 # -> We can compute CURRENT successors set and add it to the
994 # cache.
1119 # cache.
995 #
1120 #
996 current = toproceed[-1]
1121 current = toproceed[-1]
997 if current in cache:
1122 if current in cache:
998 # case (1): We already know the successors sets
1123 # case (1): We already know the successors sets
999 stackedset.remove(toproceed.pop())
1124 stackedset.remove(toproceed.pop())
1000 elif current not in succmarkers:
1125 elif current not in succmarkers:
1001 # case (2): The node is not obsolete.
1126 # case (2): The node is not obsolete.
1002 if current in repo:
1127 if current in repo:
1003 # We have a valid last successors.
1128 # We have a valid last successors.
1004 cache[current] = [(current,)]
1129 cache[current] = [(current,)]
1005 else:
1130 else:
1006 # Final obsolete version is unknown locally.
1131 # Final obsolete version is unknown locally.
1007 # Do not count that as a valid successors
1132 # Do not count that as a valid successors
1008 cache[current] = []
1133 cache[current] = []
1009 else:
1134 else:
1010 # cases (3) and (4)
1135 # cases (3) and (4)
1011 #
1136 #
1012 # We proceed in two phases. Phase 1 aims to distinguish case (3)
1137 # We proceed in two phases. Phase 1 aims to distinguish case (3)
1013 # from case (4):
1138 # from case (4):
1014 #
1139 #
1015 # For each direct successors of CURRENT, we check whether its
1140 # For each direct successors of CURRENT, we check whether its
1016 # successors sets are known. If they are not, we stack the
1141 # successors sets are known. If they are not, we stack the
1017 # unknown node and proceed to the next iteration of the while
1142 # unknown node and proceed to the next iteration of the while
1018 # loop. (case 3)
1143 # loop. (case 3)
1019 #
1144 #
1020 # During this step, we may detect obsolescence cycles: a node
1145 # During this step, we may detect obsolescence cycles: a node
1021 # with unknown successors sets but already in the call stack.
1146 # with unknown successors sets but already in the call stack.
1022 # In such a situation, we arbitrarily set the successors sets of
1147 # In such a situation, we arbitrarily set the successors sets of
1023 # the node to nothing (node pruned) to break the cycle.
1148 # the node to nothing (node pruned) to break the cycle.
1024 #
1149 #
1025 # If no break was encountered we proceed to phase 2.
1150 # If no break was encountered we proceed to phase 2.
1026 #
1151 #
1027 # Phase 2 computes successors sets of CURRENT (case 4); see details
1152 # Phase 2 computes successors sets of CURRENT (case 4); see details
1028 # in phase 2 itself.
1153 # in phase 2 itself.
1029 #
1154 #
1030 # Note the two levels of iteration in each phase.
1155 # Note the two levels of iteration in each phase.
1031 # - The first one handles obsolescence markers using CURRENT as
1156 # - The first one handles obsolescence markers using CURRENT as
1032 # precursor (successors markers of CURRENT).
1157 # precursor (successors markers of CURRENT).
1033 #
1158 #
1034 # Having multiple entries here means divergence.
1159 # Having multiple entries here means divergence.
1035 #
1160 #
1036 # - The second one handles successors defined in each marker.
1161 # - The second one handles successors defined in each marker.
1037 #
1162 #
1038 # Having none means pruned node, multiple successors means split,
1163 # Having none means pruned node, multiple successors means split,
1039 # single successors are standard replacement.
1164 # single successors are standard replacement.
1040 #
1165 #
1041 for mark in sorted(succmarkers[current]):
1166 for mark in sorted(succmarkers[current]):
1042 for suc in mark[1]:
1167 for suc in mark[1]:
1043 if suc not in cache:
1168 if suc not in cache:
1044 if suc in stackedset:
1169 if suc in stackedset:
1045 # cycle breaking
1170 # cycle breaking
1046 cache[suc] = []
1171 cache[suc] = []
1047 else:
1172 else:
1048 # case (3) If we have not computed successors sets
1173 # case (3) If we have not computed successors sets
1049 # of one of those successors we add it to the
1174 # of one of those successors we add it to the
1050 # `toproceed` stack and stop all work for this
1175 # `toproceed` stack and stop all work for this
1051 # iteration.
1176 # iteration.
1052 toproceed.append(suc)
1177 toproceed.append(suc)
1053 stackedset.add(suc)
1178 stackedset.add(suc)
1054 break
1179 break
1055 else:
1180 else:
1056 continue
1181 continue
1057 break
1182 break
1058 else:
1183 else:
1059 # case (4): we know all successors sets of all direct
1184 # case (4): we know all successors sets of all direct
1060 # successors
1185 # successors
1061 #
1186 #
1062 # Successors set contributed by each marker depends on the
1187 # Successors set contributed by each marker depends on the
1063 # successors sets of all its "successors" node.
1188 # successors sets of all its "successors" node.
1064 #
1189 #
1065 # Each different marker is a divergence in the obsolescence
1190 # Each different marker is a divergence in the obsolescence
1066 # history. It contributes successors sets distinct from other
1191 # history. It contributes successors sets distinct from other
1067 # markers.
1192 # markers.
1068 #
1193 #
1069 # Within a marker, a successor may have divergent successors
1194 # Within a marker, a successor may have divergent successors
1070 # sets. In such a case, the marker will contribute multiple
1195 # sets. In such a case, the marker will contribute multiple
1071 # divergent successors sets. If multiple successors have
1196 # divergent successors sets. If multiple successors have
1072 # divergent successors sets, a Cartesian product is used.
1197 # divergent successors sets, a Cartesian product is used.
1073 #
1198 #
1074 # At the end we post-process successors sets to remove
1199 # At the end we post-process successors sets to remove
1075 # duplicated entry and successors set that are strict subset of
1200 # duplicated entry and successors set that are strict subset of
1076 # another one.
1201 # another one.
1077 succssets = []
1202 succssets = []
1078 for mark in sorted(succmarkers[current]):
1203 for mark in sorted(succmarkers[current]):
1079 # successors sets contributed by this marker
1204 # successors sets contributed by this marker
1080 markss = [[]]
1205 markss = [[]]
1081 for suc in mark[1]:
1206 for suc in mark[1]:
1082 # cardinal product with previous successors
1207 # cardinal product with previous successors
1083 productresult = []
1208 productresult = []
1084 for prefix in markss:
1209 for prefix in markss:
1085 for suffix in cache[suc]:
1210 for suffix in cache[suc]:
1086 newss = list(prefix)
1211 newss = list(prefix)
1087 for part in suffix:
1212 for part in suffix:
1088 # do not duplicate entries in the successors set;
1213 # do not duplicate entries in the successors set;
1089 # first entry wins.
1214 # first entry wins.
1090 if part not in newss:
1215 if part not in newss:
1091 newss.append(part)
1216 newss.append(part)
1092 productresult.append(newss)
1217 productresult.append(newss)
1093 markss = productresult
1218 markss = productresult
1094 succssets.extend(markss)
1219 succssets.extend(markss)
1095 # remove duplicated and subset
1220 # remove duplicated and subset
1096 seen = []
1221 seen = []
1097 final = []
1222 final = []
1098 candidate = sorted(((set(s), s) for s in succssets if s),
1223 candidate = sorted(((set(s), s) for s in succssets if s),
1099 key=lambda x: len(x[1]), reverse=True)
1224 key=lambda x: len(x[1]), reverse=True)
1100 for setversion, listversion in candidate:
1225 for setversion, listversion in candidate:
1101 for seenset in seen:
1226 for seenset in seen:
1102 if setversion.issubset(seenset):
1227 if setversion.issubset(seenset):
1103 break
1228 break
1104 else:
1229 else:
1105 final.append(listversion)
1230 final.append(listversion)
1106 seen.append(setversion)
1231 seen.append(setversion)
1107 final.reverse() # put small successors set first
1232 final.reverse() # put small successors set first
1108 cache[current] = final
1233 cache[current] = final
1109 return cache[initialnode]
1234 return cache[initialnode]
1110
1235
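# Illustrative sketch (not part of this change): sharing the cache across many
# successorssets() calls, as the docstring above requires. 'repo' is
# hypothetical.
cache = {}
for ctx in repo.set('obsolete()'):
    for sset in successorssets(repo, ctx.node(), cache):
        pass   # each 'sset' is one tuple of latest known successors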
1111 # mapping of 'set-name' -> <function to compute this set>
1236 # mapping of 'set-name' -> <function to compute this set>
1112 cachefuncs = {}
1237 cachefuncs = {}
1113 def cachefor(name):
1238 def cachefor(name):
1114 """Decorator to register a function as computing the cache for a set"""
1239 """Decorator to register a function as computing the cache for a set"""
1115 def decorator(func):
1240 def decorator(func):
1116 assert name not in cachefuncs
1241 assert name not in cachefuncs
1117 cachefuncs[name] = func
1242 cachefuncs[name] = func
1118 return func
1243 return func
1119 return decorator
1244 return decorator
1120
1245
1121 def getrevs(repo, name):
1246 def getrevs(repo, name):
1122 """Return the set of revision that belong to the <name> set
1247 """Return the set of revision that belong to the <name> set
1123
1248
1124 Such access may compute the set and cache it for future use"""
1249 Such access may compute the set and cache it for future use"""
1125 repo = repo.unfiltered()
1250 repo = repo.unfiltered()
1126 if not repo.obsstore:
1251 if not repo.obsstore:
1127 return frozenset()
1252 return frozenset()
1128 if name not in repo.obsstore.caches:
1253 if name not in repo.obsstore.caches:
1129 repo.obsstore.caches[name] = cachefuncs[name](repo)
1254 repo.obsstore.caches[name] = cachefuncs[name](repo)
1130 return repo.obsstore.caches[name]
1255 return repo.obsstore.caches[name]
1131
1256
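# Illustrative sketch (not part of this change): querying the named sets
# computed by the @cachefor functions below. 'repo' is hypothetical.
obs = getrevs(repo, 'obsolete')
unstable = getrevs(repo, 'unstable')
assert obs.isdisjoint(unstable)   # unstable changesets are not themselves obsolete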
1132 # To keep things simple, we need to invalidate the obsolescence caches when:
1257 # To keep things simple, we need to invalidate the obsolescence caches when:
1133 #
1258 #
1134 # - a new changeset is added
1259 # - a new changeset is added
1135 # - the public phase is changed
1260 # - the public phase is changed
1136 # - obsolescence markers are added
1261 # - obsolescence markers are added
1137 # - strip is used on a repo
1262 # - strip is used on a repo
1138 def clearobscaches(repo):
1263 def clearobscaches(repo):
1139 """Remove all obsolescence related caches from a repo
1264 """Remove all obsolescence related caches from a repo
1140
1265
1141 This removes all caches in the obsstore if the obsstore already exists on the
1266 This removes all caches in the obsstore if the obsstore already exists on the
1142 repo.
1267 repo.
1143
1268
1144 (We could be smarter here given the exact event that triggers the cache
1269 (We could be smarter here given the exact event that triggers the cache
1145 clearing)"""
1270 clearing)"""
1146 # only clear caches if there is obsstore data in this repo
1271 # only clear caches if there is obsstore data in this repo
1147 if 'obsstore' in repo._filecache:
1272 if 'obsstore' in repo._filecache:
1148 repo.obsstore.caches.clear()
1273 repo.obsstore.caches.clear()
1149
1274
1150 @cachefor('obsolete')
1275 @cachefor('obsolete')
1151 def _computeobsoleteset(repo):
1276 def _computeobsoleteset(repo):
1152 """the set of obsolete revisions"""
1277 """the set of obsolete revisions"""
1153 obs = set()
1278 obs = set()
1154 getnode = repo.changelog.node
1279 getnode = repo.changelog.node
1155 notpublic = repo._phasecache.getrevset(repo, (phases.draft, phases.secret))
1280 notpublic = repo._phasecache.getrevset(repo, (phases.draft, phases.secret))
1156 for r in notpublic:
1281 for r in notpublic:
1157 if getnode(r) in repo.obsstore.successors:
1282 if getnode(r) in repo.obsstore.successors:
1158 obs.add(r)
1283 obs.add(r)
1159 return obs
1284 return obs
1160
1285
1161 @cachefor('unstable')
1286 @cachefor('unstable')
1162 def _computeunstableset(repo):
1287 def _computeunstableset(repo):
1163 """the set of non obsolete revisions with obsolete parents"""
1288 """the set of non obsolete revisions with obsolete parents"""
1164 revs = [(ctx.rev(), ctx) for ctx in
1289 revs = [(ctx.rev(), ctx) for ctx in
1165 repo.set('(not public()) and (not obsolete())')]
1290 repo.set('(not public()) and (not obsolete())')]
1166 revs.sort(key=lambda x:x[0])
1291 revs.sort(key=lambda x:x[0])
1167 unstable = set()
1292 unstable = set()
1168 for rev, ctx in revs:
1293 for rev, ctx in revs:
1169 # A rev is unstable if one of its parent is obsolete or unstable
1294 # A rev is unstable if one of its parent is obsolete or unstable
1170 # this works since we traverse following growing rev order
1295 # this works since we traverse following growing rev order
1171 if any((x.obsolete() or (x.rev() in unstable))
1296 if any((x.obsolete() or (x.rev() in unstable))
1172 for x in ctx.parents()):
1297 for x in ctx.parents()):
1173 unstable.add(rev)
1298 unstable.add(rev)
1174 return unstable
1299 return unstable
1175
1300
1176 @cachefor('suspended')
1301 @cachefor('suspended')
1177 def _computesuspendedset(repo):
1302 def _computesuspendedset(repo):
1178 """the set of obsolete parents with non obsolete descendants"""
1303 """the set of obsolete parents with non obsolete descendants"""
1179 suspended = repo.changelog.ancestors(getrevs(repo, 'unstable'))
1304 suspended = repo.changelog.ancestors(getrevs(repo, 'unstable'))
1180 return set(r for r in getrevs(repo, 'obsolete') if r in suspended)
1305 return set(r for r in getrevs(repo, 'obsolete') if r in suspended)
1181
1306
1182 @cachefor('extinct')
1307 @cachefor('extinct')
1183 def _computeextinctset(repo):
1308 def _computeextinctset(repo):
1184 """the set of obsolete parents without non obsolete descendants"""
1309 """the set of obsolete parents without non obsolete descendants"""
1185 return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')
1310 return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')
1186
1311
1187
1312
1188 @cachefor('bumped')
1313 @cachefor('bumped')
1189 def _computebumpedset(repo):
1314 def _computebumpedset(repo):
1190 """the set of revs trying to obsolete public revisions"""
1315 """the set of revs trying to obsolete public revisions"""
1191 bumped = set()
1316 bumped = set()
1192 # util function (avoid attribute lookup in the loop)
1317 # util function (avoid attribute lookup in the loop)
1193 phase = repo._phasecache.phase # would be faster to grab the full list
1318 phase = repo._phasecache.phase # would be faster to grab the full list
1194 public = phases.public
1319 public = phases.public
1195 cl = repo.changelog
1320 cl = repo.changelog
1196 torev = cl.nodemap.get
1321 torev = cl.nodemap.get
1197 for ctx in repo.set('(not public()) and (not obsolete())'):
1322 for ctx in repo.set('(not public()) and (not obsolete())'):
1198 rev = ctx.rev()
1323 rev = ctx.rev()
1199 # We only evaluate mutable, non-obsolete revisions
1324 # We only evaluate mutable, non-obsolete revisions
1200 node = ctx.node()
1325 node = ctx.node()
1201 # (future) A cache of precursors may be worthwhile if splits are very common
1326 # (future) A cache of precursors may be worthwhile if splits are very common
1202 for pnode in allprecursors(repo.obsstore, [node],
1327 for pnode in allprecursors(repo.obsstore, [node],
1203 ignoreflags=bumpedfix):
1328 ignoreflags=bumpedfix):
1204 prev = torev(pnode) # unfiltered! but so is phasecache
1329 prev = torev(pnode) # unfiltered! but so is phasecache
1205 if (prev is not None) and (phase(repo, prev) <= public):
1330 if (prev is not None) and (phase(repo, prev) <= public):
1206 # we have a public precursor
1331 # we have a public precursor
1207 bumped.add(rev)
1332 bumped.add(rev)
1208 break # Next draft!
1333 break # Next draft!
1209 return bumped
1334 return bumped
1210
1335
1211 @cachefor('divergent')
1336 @cachefor('divergent')
1212 def _computedivergentset(repo):
1337 def _computedivergentset(repo):
1213 """the set of revs that compete to be the final successor of some revision.
1338 """the set of revs that compete to be the final successor of some revision.
1214 """
1339 """
1215 divergent = set()
1340 divergent = set()
1216 obsstore = repo.obsstore
1341 obsstore = repo.obsstore
1217 newermap = {}
1342 newermap = {}
1218 for ctx in repo.set('(not public()) - obsolete()'):
1343 for ctx in repo.set('(not public()) - obsolete()'):
1219 mark = obsstore.precursors.get(ctx.node(), ())
1344 mark = obsstore.precursors.get(ctx.node(), ())
1220 toprocess = set(mark)
1345 toprocess = set(mark)
1221 seen = set()
1346 seen = set()
1222 while toprocess:
1347 while toprocess:
1223 prec = toprocess.pop()[0]
1348 prec = toprocess.pop()[0]
1224 if prec in seen:
1349 if prec in seen:
1225 continue # emergency protection against marker cycles hanging the loop
1350 continue # emergency protection against marker cycles hanging the loop
1226 seen.add(prec)
1351 seen.add(prec)
1227 if prec not in newermap:
1352 if prec not in newermap:
1228 successorssets(repo, prec, newermap)
1353 successorssets(repo, prec, newermap)
1229 newer = [n for n in newermap[prec] if n]
1354 newer = [n for n in newermap[prec] if n]
1230 if len(newer) > 1:
1355 if len(newer) > 1:
1231 divergent.add(ctx.rev())
1356 divergent.add(ctx.rev())
1232 break
1357 break
1233 toprocess.update(obsstore.precursors.get(prec, ()))
1358 toprocess.update(obsstore.precursors.get(prec, ()))
1234 return divergent
1359 return divergent
1235
1360
1236
1361
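The cached sets computed above are normally consumed through obsolete.getrevs(),
which fills the per-name cache on demand (the code above itself uses it for the
'unstable' and 'obsolete' names). A minimal consumer sketch follows; it is not
part of this changeset, and `somerepo` and `reporttroubles` are hypothetical
names used only for illustration.

    from mercurial import obsolete

    def reporttroubles(somerepo):
        # each name maps to one of the @cachefor computations above
        for name in ('obsolete', 'unstable', 'bumped', 'divergent'):
            revs = obsolete.getrevs(somerepo, name)
            somerepo.ui.write('%s: %d revisions\n' % (name, len(revs)))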
1237 def createmarkers(repo, relations, flag=0, date=None, metadata=None,
1362 def createmarkers(repo, relations, flag=0, date=None, metadata=None,
1238 operation=None):
1363 operation=None):
1239 """Add obsolete markers between changesets in a repo
1364 """Add obsolete markers between changesets in a repo
1240
1365
1241 <relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
1366 <relations> must be an iterable of (<old>, (<new>, ...)[,{metadata}])
1242 tuples. `old` is a changectx, `news` a tuple of changectx. metadata is an optional dictionary
1367 tuples. `old` is a changectx, `news` a tuple of changectx. metadata is an optional dictionary
1243 containing metadata for this marker only. It is merged with the global
1368 containing metadata for this marker only. It is merged with the global
1244 metadata specified through the `metadata` argument of this function.
1369 metadata specified through the `metadata` argument of this function.
1245
1370
1246 Trying to obsolete a public changeset will raise an exception.
1371 Trying to obsolete a public changeset will raise an exception.
1247
1372
1248 The current user and date are used unless specified otherwise in the
1373 The current user and date are used unless specified otherwise in the
1249 metadata argument.
1374 metadata argument.
1250
1375
1251 This function operates within a transaction of its own, but does
1376 This function operates within a transaction of its own, but does
1252 not take any lock on the repo.
1377 not take any lock on the repo.
1253 """
1378 """
1254 # prepare metadata
1379 # prepare metadata
1255 if metadata is None:
1380 if metadata is None:
1256 metadata = {}
1381 metadata = {}
1257 if 'user' not in metadata:
1382 if 'user' not in metadata:
1258 metadata['user'] = repo.ui.username()
1383 metadata['user'] = repo.ui.username()
1259 useoperation = repo.ui.configbool('experimental',
1384 useoperation = repo.ui.configbool('experimental',
1260 'evolution.track-operation',
1385 'evolution.track-operation',
1261 False)
1386 False)
1262 if useoperation and operation:
1387 if useoperation and operation:
1263 metadata['operation'] = operation
1388 metadata['operation'] = operation
1264 tr = repo.transaction('add-obsolescence-marker')
1389 tr = repo.transaction('add-obsolescence-marker')
1265 try:
1390 try:
1266 markerargs = []
1391 markerargs = []
1267 for rel in relations:
1392 for rel in relations:
1268 prec = rel[0]
1393 prec = rel[0]
1269 sucs = rel[1]
1394 sucs = rel[1]
1270 localmetadata = metadata.copy()
1395 localmetadata = metadata.copy()
1271 if 2 < len(rel):
1396 if 2 < len(rel):
1272 localmetadata.update(rel[2])
1397 localmetadata.update(rel[2])
1273
1398
1274 if not prec.mutable():
1399 if not prec.mutable():
1275 raise error.Abort(_("cannot obsolete public changeset: %s")
1400 raise error.Abort(_("cannot obsolete public changeset: %s")
1276 % prec,
1401 % prec,
1277 hint="see 'hg help phases' for details")
1402 hint="see 'hg help phases' for details")
1278 nprec = prec.node()
1403 nprec = prec.node()
1279 nsucs = tuple(s.node() for s in sucs)
1404 nsucs = tuple(s.node() for s in sucs)
1280 npare = None
1405 npare = None
1281 if not nsucs:
1406 if not nsucs:
1282 npare = tuple(p.node() for p in prec.parents())
1407 npare = tuple(p.node() for p in prec.parents())
1283 if nprec in nsucs:
1408 if nprec in nsucs:
1284 raise error.Abort(_("changeset %s cannot obsolete itself")
1409 raise error.Abort(_("changeset %s cannot obsolete itself")
1285 % prec)
1410 % prec)
1286
1411
1287 # Creating the marker causes the hidden cache to become invalid,
1412 # Creating the marker causes the hidden cache to become invalid,
1288 # which causes recomputation when we ask for prec.parents() above.
1413 # which causes recomputation when we ask for prec.parents() above.
1289 # This results in n^2 behavior. So let's prepare all of the args
1414 # This results in n^2 behavior. So let's prepare all of the args
1290 # first, then create the markers.
1415 # first, then create the markers.
1291 markerargs.append((nprec, nsucs, npare, localmetadata))
1416 markerargs.append((nprec, nsucs, npare, localmetadata))
1292
1417
1293 for args in markerargs:
1418 for args in markerargs:
1294 nprec, nsucs, npare, localmetadata = args
1419 nprec, nsucs, npare, localmetadata = args
1295 repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
1420 repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
1296 date=date, metadata=localmetadata,
1421 date=date, metadata=localmetadata,
1297 ui=repo.ui)
1422 ui=repo.ui)
1298 repo.filteredrevcache.clear()
1423 repo.filteredrevcache.clear()
1299 tr.close()
1424 tr.close()
1300 finally:
1425 finally:
1301 tr.release()
1426 tr.release()
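A brief usage sketch of the API documented above; it is not part of this
changeset. The helper name `markrewritten`, its arguments, and the 'note'
metadata key are hypothetical; the relations format and locking expectations
follow the createmarkers() docstring (the function opens its own transaction
but takes no lock).

    from mercurial import obsolete

    def markrewritten(repo, oldctx, newctx):
        # one relation: (<old>, (<new>, ...)[, {metadata}])
        relations = [(oldctx, (newctx,), {'note': 'example rewrite'})]
        # wlock before lock is the usual ordering; createmarkers() itself
        # only opens a transaction.
        with repo.wlock(), repo.lock():
            obsolete.createmarkers(repo, relations, operation='rewrite')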
@@ -1,381 +1,381 b''
1 Show all commands except debug commands
1 Show all commands except debug commands
2 $ hg debugcomplete
2 $ hg debugcomplete
3 add
3 add
4 addremove
4 addremove
5 annotate
5 annotate
6 archive
6 archive
7 backout
7 backout
8 bisect
8 bisect
9 bookmarks
9 bookmarks
10 branch
10 branch
11 branches
11 branches
12 bundle
12 bundle
13 cat
13 cat
14 clone
14 clone
15 commit
15 commit
16 config
16 config
17 copy
17 copy
18 diff
18 diff
19 export
19 export
20 files
20 files
21 forget
21 forget
22 graft
22 graft
23 grep
23 grep
24 heads
24 heads
25 help
25 help
26 identify
26 identify
27 import
27 import
28 incoming
28 incoming
29 init
29 init
30 locate
30 locate
31 log
31 log
32 manifest
32 manifest
33 merge
33 merge
34 outgoing
34 outgoing
35 parents
35 parents
36 paths
36 paths
37 phase
37 phase
38 pull
38 pull
39 push
39 push
40 recover
40 recover
41 remove
41 remove
42 rename
42 rename
43 resolve
43 resolve
44 revert
44 revert
45 rollback
45 rollback
46 root
46 root
47 serve
47 serve
48 status
48 status
49 summary
49 summary
50 tag
50 tag
51 tags
51 tags
52 tip
52 tip
53 unbundle
53 unbundle
54 update
54 update
55 verify
55 verify
56 version
56 version
57
57
58 Show all commands that start with "a"
58 Show all commands that start with "a"
59 $ hg debugcomplete a
59 $ hg debugcomplete a
60 add
60 add
61 addremove
61 addremove
62 annotate
62 annotate
63 archive
63 archive
64
64
65 Do not show debug commands if there are other candidates
65 Do not show debug commands if there are other candidates
66 $ hg debugcomplete d
66 $ hg debugcomplete d
67 diff
67 diff
68
68
69 Show debug commands if there are no other candidates
69 Show debug commands if there are no other candidates
70 $ hg debugcomplete debug
70 $ hg debugcomplete debug
71 debugancestor
71 debugancestor
72 debugapplystreamclonebundle
72 debugapplystreamclonebundle
73 debugbuilddag
73 debugbuilddag
74 debugbundle
74 debugbundle
75 debugcheckstate
75 debugcheckstate
76 debugcolor
76 debugcolor
77 debugcommands
77 debugcommands
78 debugcomplete
78 debugcomplete
79 debugconfig
79 debugconfig
80 debugcreatestreamclonebundle
80 debugcreatestreamclonebundle
81 debugdag
81 debugdag
82 debugdata
82 debugdata
83 debugdate
83 debugdate
84 debugdeltachain
84 debugdeltachain
85 debugdirstate
85 debugdirstate
86 debugdiscovery
86 debugdiscovery
87 debugextensions
87 debugextensions
88 debugfileset
88 debugfileset
89 debugfsinfo
89 debugfsinfo
90 debuggetbundle
90 debuggetbundle
91 debugignore
91 debugignore
92 debugindex
92 debugindex
93 debugindexdot
93 debugindexdot
94 debuginstall
94 debuginstall
95 debugknown
95 debugknown
96 debuglabelcomplete
96 debuglabelcomplete
97 debuglocks
97 debuglocks
98 debugmergestate
98 debugmergestate
99 debugnamecomplete
99 debugnamecomplete
100 debugobsolete
100 debugobsolete
101 debugpathcomplete
101 debugpathcomplete
102 debugpickmergetool
102 debugpickmergetool
103 debugpushkey
103 debugpushkey
104 debugpvec
104 debugpvec
105 debugrebuilddirstate
105 debugrebuilddirstate
106 debugrebuildfncache
106 debugrebuildfncache
107 debugrename
107 debugrename
108 debugrevlog
108 debugrevlog
109 debugrevspec
109 debugrevspec
110 debugsetparents
110 debugsetparents
111 debugsub
111 debugsub
112 debugsuccessorssets
112 debugsuccessorssets
113 debugtemplate
113 debugtemplate
114 debugupdatecaches
114 debugupdatecaches
115 debugupgraderepo
115 debugupgraderepo
116 debugwalk
116 debugwalk
117 debugwireargs
117 debugwireargs
118
118
119 Do not show the alias of a debug command if there are other candidates
119 Do not show the alias of a debug command if there are other candidates
120 (this should hide rawcommit)
120 (this should hide rawcommit)
121 $ hg debugcomplete r
121 $ hg debugcomplete r
122 recover
122 recover
123 remove
123 remove
124 rename
124 rename
125 resolve
125 resolve
126 revert
126 revert
127 rollback
127 rollback
128 root
128 root
129 Show the alias of a debug command if there are no other candidates
129 Show the alias of a debug command if there are no other candidates
130 $ hg debugcomplete rawc
130 $ hg debugcomplete rawc
131
131
132
132
133 Show the global options
133 Show the global options
134 $ hg debugcomplete --options | sort
134 $ hg debugcomplete --options | sort
135 --color
135 --color
136 --config
136 --config
137 --cwd
137 --cwd
138 --debug
138 --debug
139 --debugger
139 --debugger
140 --encoding
140 --encoding
141 --encodingmode
141 --encodingmode
142 --help
142 --help
143 --hidden
143 --hidden
144 --noninteractive
144 --noninteractive
145 --pager
145 --pager
146 --profile
146 --profile
147 --quiet
147 --quiet
148 --repository
148 --repository
149 --time
149 --time
150 --traceback
150 --traceback
151 --verbose
151 --verbose
152 --version
152 --version
153 -R
153 -R
154 -h
154 -h
155 -q
155 -q
156 -v
156 -v
157 -y
157 -y
158
158
159 Show the options for the "serve" command
159 Show the options for the "serve" command
160 $ hg debugcomplete --options serve | sort
160 $ hg debugcomplete --options serve | sort
161 --accesslog
161 --accesslog
162 --address
162 --address
163 --certificate
163 --certificate
164 --cmdserver
164 --cmdserver
165 --color
165 --color
166 --config
166 --config
167 --cwd
167 --cwd
168 --daemon
168 --daemon
169 --daemon-postexec
169 --daemon-postexec
170 --debug
170 --debug
171 --debugger
171 --debugger
172 --encoding
172 --encoding
173 --encodingmode
173 --encodingmode
174 --errorlog
174 --errorlog
175 --help
175 --help
176 --hidden
176 --hidden
177 --ipv6
177 --ipv6
178 --name
178 --name
179 --noninteractive
179 --noninteractive
180 --pager
180 --pager
181 --pid-file
181 --pid-file
182 --port
182 --port
183 --prefix
183 --prefix
184 --profile
184 --profile
185 --quiet
185 --quiet
186 --repository
186 --repository
187 --stdio
187 --stdio
188 --style
188 --style
189 --subrepos
189 --subrepos
190 --templates
190 --templates
191 --time
191 --time
192 --traceback
192 --traceback
193 --verbose
193 --verbose
194 --version
194 --version
195 --web-conf
195 --web-conf
196 -6
196 -6
197 -A
197 -A
198 -E
198 -E
199 -R
199 -R
200 -S
200 -S
201 -a
201 -a
202 -d
202 -d
203 -h
203 -h
204 -n
204 -n
205 -p
205 -p
206 -q
206 -q
207 -t
207 -t
208 -v
208 -v
209 -y
209 -y
210
210
211 Show an error if we use --options with an ambiguous abbreviation
211 Show an error if we use --options with an ambiguous abbreviation
212 $ hg debugcomplete --options s
212 $ hg debugcomplete --options s
213 hg: command 's' is ambiguous:
213 hg: command 's' is ambiguous:
214 serve showconfig status summary
214 serve showconfig status summary
215 [255]
215 [255]
216
216
217 Show all commands + options
217 Show all commands + options
218 $ hg debugcommands
218 $ hg debugcommands
219 add: include, exclude, subrepos, dry-run
219 add: include, exclude, subrepos, dry-run
220 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
220 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template
221 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
221 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure
222 commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
222 commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos
223 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
223 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos
224 export: output, switch-parent, rev, text, git, binary, nodates
224 export: output, switch-parent, rev, text, git, binary, nodates
225 forget: include, exclude
225 forget: include, exclude
226 init: ssh, remotecmd, insecure
226 init: ssh, remotecmd, insecure
227 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
227 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
228 merge: force, rev, preview, tool
228 merge: force, rev, preview, tool
229 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
229 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
230 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
230 push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure
231 remove: after, force, subrepos, include, exclude
231 remove: after, force, subrepos, include, exclude
232 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
232 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, subrepos
233 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template
233 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template
234 summary: remote
234 summary: remote
235 update: clean, check, merge, date, rev, tool
235 update: clean, check, merge, date, rev, tool
236 addremove: similarity, subrepos, include, exclude, dry-run
236 addremove: similarity, subrepos, include, exclude, dry-run
237 archive: no-decode, prefix, rev, type, subrepos, include, exclude
237 archive: no-decode, prefix, rev, type, subrepos, include, exclude
238 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
238 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
239 bisect: reset, good, bad, skip, extend, command, noupdate
239 bisect: reset, good, bad, skip, extend, command, noupdate
240 bookmarks: force, rev, delete, rename, inactive, template
240 bookmarks: force, rev, delete, rename, inactive, template
241 branch: force, clean
241 branch: force, clean
242 branches: active, closed, template
242 branches: active, closed, template
243 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
243 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
244 cat: output, rev, decode, include, exclude, template
244 cat: output, rev, decode, include, exclude, template
245 config: untrusted, edit, local, global, template
245 config: untrusted, edit, local, global, template
246 copy: after, force, include, exclude, dry-run
246 copy: after, force, include, exclude, dry-run
247 debugancestor:
247 debugancestor:
248 debugapplystreamclonebundle:
248 debugapplystreamclonebundle:
249 debugbuilddag: mergeable-file, overwritten-file, new-file
249 debugbuilddag: mergeable-file, overwritten-file, new-file
250 debugbundle: all, spec
250 debugbundle: all, spec
251 debugcheckstate:
251 debugcheckstate:
252 debugcolor: style
252 debugcolor: style
253 debugcommands:
253 debugcommands:
254 debugcomplete: options
254 debugcomplete: options
255 debugcreatestreamclonebundle:
255 debugcreatestreamclonebundle:
256 debugdag: tags, branches, dots, spaces
256 debugdag: tags, branches, dots, spaces
257 debugdata: changelog, manifest, dir
257 debugdata: changelog, manifest, dir
258 debugdate: extended
258 debugdate: extended
259 debugdeltachain: changelog, manifest, dir, template
259 debugdeltachain: changelog, manifest, dir, template
260 debugdirstate: nodates, datesort
260 debugdirstate: nodates, datesort
261 debugdiscovery: old, nonheads, ssh, remotecmd, insecure
261 debugdiscovery: old, nonheads, ssh, remotecmd, insecure
262 debugextensions: template
262 debugextensions: template
263 debugfileset: rev
263 debugfileset: rev
264 debugfsinfo:
264 debugfsinfo:
265 debuggetbundle: head, common, type
265 debuggetbundle: head, common, type
266 debugignore:
266 debugignore:
267 debugindex: changelog, manifest, dir, format
267 debugindex: changelog, manifest, dir, format
268 debugindexdot: changelog, manifest, dir
268 debugindexdot: changelog, manifest, dir
269 debuginstall: template
269 debuginstall: template
270 debugknown:
270 debugknown:
271 debuglabelcomplete:
271 debuglabelcomplete:
272 debuglocks: force-lock, force-wlock
272 debuglocks: force-lock, force-wlock
273 debugmergestate:
273 debugmergestate:
274 debugnamecomplete:
274 debugnamecomplete:
275 debugobsolete: flags, record-parents, rev, index, delete, date, user, template
275 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
276 debugpathcomplete: full, normal, added, removed
276 debugpathcomplete: full, normal, added, removed
277 debugpickmergetool: rev, changedelete, include, exclude, tool
277 debugpickmergetool: rev, changedelete, include, exclude, tool
278 debugpushkey:
278 debugpushkey:
279 debugpvec:
279 debugpvec:
280 debugrebuilddirstate: rev, minimal
280 debugrebuilddirstate: rev, minimal
281 debugrebuildfncache:
281 debugrebuildfncache:
282 debugrename: rev
282 debugrename: rev
283 debugrevlog: changelog, manifest, dir, dump
283 debugrevlog: changelog, manifest, dir, dump
284 debugrevspec: optimize, show-stage, no-optimized, verify-optimized
284 debugrevspec: optimize, show-stage, no-optimized, verify-optimized
285 debugsetparents:
285 debugsetparents:
286 debugsub: rev
286 debugsub: rev
287 debugsuccessorssets:
287 debugsuccessorssets:
288 debugtemplate: rev, define
288 debugtemplate: rev, define
289 debugupdatecaches:
289 debugupdatecaches:
290 debugupgraderepo: optimize, run
290 debugupgraderepo: optimize, run
291 debugwalk: include, exclude
291 debugwalk: include, exclude
292 debugwireargs: three, four, five, ssh, remotecmd, insecure
292 debugwireargs: three, four, five, ssh, remotecmd, insecure
293 files: rev, print0, include, exclude, template, subrepos
293 files: rev, print0, include, exclude, template, subrepos
294 graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
294 graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run
295 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
295 grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, template, include, exclude
296 heads: rev, topo, active, closed, style, template
296 heads: rev, topo, active, closed, style, template
297 help: extension, command, keyword, system
297 help: extension, command, keyword, system
298 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure
298 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure
299 import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
299 import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
300 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
300 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
301 locate: rev, print0, fullpath, include, exclude
301 locate: rev, print0, fullpath, include, exclude
302 manifest: rev, all, template
302 manifest: rev, all, template
303 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
303 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
304 parents: rev, style, template
304 parents: rev, style, template
305 paths: template
305 paths: template
306 phase: public, draft, secret, force, rev
306 phase: public, draft, secret, force, rev
307 recover:
307 recover:
308 rename: after, force, include, exclude, dry-run
308 rename: after, force, include, exclude, dry-run
309 resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
309 resolve: all, list, mark, unmark, no-status, tool, include, exclude, template
310 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
310 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
311 rollback: dry-run, force
311 rollback: dry-run, force
312 root:
312 root:
313 tag: force, local, rev, remove, edit, message, date, user
313 tag: force, local, rev, remove, edit, message, date, user
314 tags: template
314 tags: template
315 tip: patch, git, style, template
315 tip: patch, git, style, template
316 unbundle: update
316 unbundle: update
317 verify:
317 verify:
318 version: template
318 version: template
319
319
320 $ hg init a
320 $ hg init a
321 $ cd a
321 $ cd a
322 $ echo fee > fee
322 $ echo fee > fee
323 $ hg ci -q -Amfee
323 $ hg ci -q -Amfee
324 $ hg tag fee
324 $ hg tag fee
325 $ mkdir fie
325 $ mkdir fie
326 $ echo dead > fie/dead
326 $ echo dead > fie/dead
327 $ echo live > fie/live
327 $ echo live > fie/live
328 $ hg bookmark fo
328 $ hg bookmark fo
329 $ hg branch -q fie
329 $ hg branch -q fie
330 $ hg ci -q -Amfie
330 $ hg ci -q -Amfie
331 $ echo fo > fo
331 $ echo fo > fo
332 $ hg branch -qf default
332 $ hg branch -qf default
333 $ hg ci -q -Amfo
333 $ hg ci -q -Amfo
334 $ echo Fum > Fum
334 $ echo Fum > Fum
335 $ hg ci -q -AmFum
335 $ hg ci -q -AmFum
336 $ hg bookmark Fum
336 $ hg bookmark Fum
337
337
338 Test debugpathcomplete
338 Test debugpathcomplete
339
339
340 $ hg debugpathcomplete f
340 $ hg debugpathcomplete f
341 fee
341 fee
342 fie
342 fie
343 fo
343 fo
344 $ hg debugpathcomplete -f f
344 $ hg debugpathcomplete -f f
345 fee
345 fee
346 fie/dead
346 fie/dead
347 fie/live
347 fie/live
348 fo
348 fo
349
349
350 $ hg rm Fum
350 $ hg rm Fum
351 $ hg debugpathcomplete -r F
351 $ hg debugpathcomplete -r F
352 Fum
352 Fum
353
353
354 Test debugnamecomplete
354 Test debugnamecomplete
355
355
356 $ hg debugnamecomplete
356 $ hg debugnamecomplete
357 Fum
357 Fum
358 default
358 default
359 fee
359 fee
360 fie
360 fie
361 fo
361 fo
362 tip
362 tip
363 $ hg debugnamecomplete f
363 $ hg debugnamecomplete f
364 fee
364 fee
365 fie
365 fie
366 fo
366 fo
367
367
368 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
368 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
369 used for completions in some shells.
369 used for completions in some shells.
370
370
371 $ hg debuglabelcomplete
371 $ hg debuglabelcomplete
372 Fum
372 Fum
373 default
373 default
374 fee
374 fee
375 fie
375 fie
376 fo
376 fo
377 tip
377 tip
378 $ hg debuglabelcomplete f
378 $ hg debuglabelcomplete f
379 fee
379 fee
380 fie
380 fie
381 fo
381 fo
@@ -1,1307 +1,1343 b''
1 $ cat >> $HGRCPATH << EOF
1 $ cat >> $HGRCPATH << EOF
2 > [phases]
2 > [phases]
3 > # public changesets are not obsolete
3 > # public changesets are not obsolete
4 > publish=false
4 > publish=false
5 > [ui]
5 > [ui]
6 > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(troubles, ' {troubles}')}) [{tags} {bookmarks}] {desc|firstline}\n"
6 > logtemplate="{rev}:{node|short} ({phase}{if(obsolete, ' *{obsolete}*')}{if(troubles, ' {troubles}')}) [{tags} {bookmarks}] {desc|firstline}\n"
7 > EOF
7 > EOF
8 $ mkcommit() {
8 $ mkcommit() {
9 > echo "$1" > "$1"
9 > echo "$1" > "$1"
10 > hg add "$1"
10 > hg add "$1"
11 > hg ci -m "add $1"
11 > hg ci -m "add $1"
12 > }
12 > }
13 $ getid() {
13 $ getid() {
14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
14 > hg log -T "{node}\n" --hidden -r "desc('$1')"
15 > }
15 > }
16
16
17 $ cat > debugkeys.py <<EOF
17 $ cat > debugkeys.py <<EOF
18 > def reposetup(ui, repo):
18 > def reposetup(ui, repo):
19 > class debugkeysrepo(repo.__class__):
19 > class debugkeysrepo(repo.__class__):
20 > def listkeys(self, namespace):
20 > def listkeys(self, namespace):
21 > ui.write('listkeys %s\n' % (namespace,))
21 > ui.write('listkeys %s\n' % (namespace,))
22 > return super(debugkeysrepo, self).listkeys(namespace)
22 > return super(debugkeysrepo, self).listkeys(namespace)
23 >
23 >
24 > if repo.local():
24 > if repo.local():
25 > repo.__class__ = debugkeysrepo
25 > repo.__class__ = debugkeysrepo
26 > EOF
26 > EOF
27
27
28 $ hg init tmpa
28 $ hg init tmpa
29 $ cd tmpa
29 $ cd tmpa
30 $ mkcommit kill_me
30 $ mkcommit kill_me
31
31
32 Checking that the feature is properly disabled
32 Checking that the feature is properly disabled
33
33
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
34 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
35 abort: creating obsolete markers is not enabled on this repo
35 abort: creating obsolete markers is not enabled on this repo
36 [255]
36 [255]
37
37
38 Enabling it
38 Enabling it
39
39
40 $ cat >> $HGRCPATH << EOF
40 $ cat >> $HGRCPATH << EOF
41 > [experimental]
41 > [experimental]
42 > evolution=createmarkers,exchange
42 > evolution=createmarkers,exchange
43 > EOF
43 > EOF
44
44
45 Killing a single changeset without replacement
45 Killing a single changeset without replacement
46
46
47 $ hg debugobsolete 0
47 $ hg debugobsolete 0
48 abort: changeset references must be full hexadecimal node identifiers
48 abort: changeset references must be full hexadecimal node identifiers
49 [255]
49 [255]
50 $ hg debugobsolete '00'
50 $ hg debugobsolete '00'
51 abort: changeset references must be full hexadecimal node identifiers
51 abort: changeset references must be full hexadecimal node identifiers
52 [255]
52 [255]
53 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
53 $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
54 $ hg debugobsolete
54 $ hg debugobsolete
55 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
55 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'}
56
56
57 (test that mercurial is not confused)
57 (test that mercurial is not confused)
58
58
59 $ hg up null --quiet # having 0 as parent prevents it from being hidden
59 $ hg up null --quiet # having 0 as parent prevents it from being hidden
60 $ hg tip
60 $ hg tip
61 -1:000000000000 (public) [tip ]
61 -1:000000000000 (public) [tip ]
62 $ hg up --hidden tip --quiet
62 $ hg up --hidden tip --quiet
63
63
64 Killing a single changeset with itself should fail
64 Killing a single changeset with itself should fail
65 (simple local safeguard)
65 (simple local safeguard)
66
66
67 $ hg debugobsolete `getid kill_me` `getid kill_me`
67 $ hg debugobsolete `getid kill_me` `getid kill_me`
68 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
68 abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0
69 [255]
69 [255]
70
70
71 $ cd ..
71 $ cd ..
72
72
73 Killing a single changeset with replacement
73 Killing a single changeset with replacement
74 (and testing the format option)
74 (and testing the format option)
75
75
76 $ hg init tmpb
76 $ hg init tmpb
77 $ cd tmpb
77 $ cd tmpb
78 $ mkcommit a
78 $ mkcommit a
79 $ mkcommit b
79 $ mkcommit b
80 $ mkcommit original_c
80 $ mkcommit original_c
81 $ hg up "desc('b')"
81 $ hg up "desc('b')"
82 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
82 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
83 $ mkcommit new_c
83 $ mkcommit new_c
84 created new head
84 created new head
85 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
85 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
86 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
86 $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120'
87 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
87 $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden
88 2:245bde4270cd add original_c
88 2:245bde4270cd add original_c
89 $ hg debugrevlog -cd
89 $ hg debugrevlog -cd
90 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
90 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
91 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
91 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0
92 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
92 1 0 -1 59 118 59 59 0 0 58 116 0 1 0
93 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
93 2 1 -1 118 193 118 118 59 0 76 192 0 1 0
94 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
94 3 1 -1 193 260 193 193 59 0 66 258 0 2 0
95 $ hg debugobsolete
95 $ hg debugobsolete
96 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
96 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
97
97
98 (check for version number of the obsstore)
98 (check for version number of the obsstore)
99
99
100 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
100 $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null
101 \x00 (no-eol) (esc)
101 \x00 (no-eol) (esc)
102
102
103 do it again (it reads the obsstore before adding a new changeset)
103 do it again (it reads the obsstore before adding a new changeset)
104
104
105 $ hg up '.^'
105 $ hg up '.^'
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
107 $ mkcommit new_2_c
107 $ mkcommit new_2_c
108 created new head
108 created new head
109 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
109 $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c`
110 $ hg debugobsolete
110 $ hg debugobsolete
111 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
111 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
112 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
112 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
113
113
114 Register two markers with a missing node
114 Register two markers with a missing node
115
115
116 $ hg up '.^'
116 $ hg up '.^'
117 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
117 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
118 $ mkcommit new_3_c
118 $ mkcommit new_3_c
119 created new head
119 created new head
120 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
120 $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337
121 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
121 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c`
122 $ hg debugobsolete
122 $ hg debugobsolete
123 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
123 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
124 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
124 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
125 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
125 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
126 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
126 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
127
127
128 Test the --index option of debugobsolete command
128 Test the --index option of debugobsolete command
129 $ hg debugobsolete --index
129 $ hg debugobsolete --index
130 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
130 0 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
131 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
131 1 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
132 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
132 2 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
133 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
133 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
134
134
135 Refuse pathological nullid successors
135 Refuse pathological nullid successors
136 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
136 $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000
137 transaction abort!
137 transaction abort!
138 rollback completed
138 rollback completed
139 abort: bad obsolescence marker detected: invalid successors nullid
139 abort: bad obsolescence marker detected: invalid successors nullid
140 [255]
140 [255]
141
141
142 Check that graphlog detects that a changeset is obsolete:
142 Check that graphlog detects that a changeset is obsolete:
143
143
144 $ hg log -G
144 $ hg log -G
145 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
145 @ 5:5601fb93a350 (draft) [tip ] add new_3_c
146 |
146 |
147 o 1:7c3bad9141dc (draft) [ ] add b
147 o 1:7c3bad9141dc (draft) [ ] add b
148 |
148 |
149 o 0:1f0dee641bb7 (draft) [ ] add a
149 o 0:1f0dee641bb7 (draft) [ ] add a
150
150
151
151
152 check that heads does not report them
152 check that heads does not report them
153
153
154 $ hg heads
154 $ hg heads
155 5:5601fb93a350 (draft) [tip ] add new_3_c
155 5:5601fb93a350 (draft) [tip ] add new_3_c
156 $ hg heads --hidden
156 $ hg heads --hidden
157 5:5601fb93a350 (draft) [tip ] add new_3_c
157 5:5601fb93a350 (draft) [tip ] add new_3_c
158 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
158 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
159 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
159 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
160 2:245bde4270cd (draft *obsolete*) [ ] add original_c
160 2:245bde4270cd (draft *obsolete*) [ ] add original_c
161
161
162
162
163 check that summary does not report them
163 check that summary does not report them
164
164
165 $ hg init ../sink
165 $ hg init ../sink
166 $ echo '[paths]' >> .hg/hgrc
166 $ echo '[paths]' >> .hg/hgrc
167 $ echo 'default=../sink' >> .hg/hgrc
167 $ echo 'default=../sink' >> .hg/hgrc
168 $ hg summary --remote
168 $ hg summary --remote
169 parent: 5:5601fb93a350 tip
169 parent: 5:5601fb93a350 tip
170 add new_3_c
170 add new_3_c
171 branch: default
171 branch: default
172 commit: (clean)
172 commit: (clean)
173 update: (current)
173 update: (current)
174 phases: 3 draft
174 phases: 3 draft
175 remote: 3 outgoing
175 remote: 3 outgoing
176
176
177 $ hg summary --remote --hidden
177 $ hg summary --remote --hidden
178 parent: 5:5601fb93a350 tip
178 parent: 5:5601fb93a350 tip
179 add new_3_c
179 add new_3_c
180 branch: default
180 branch: default
181 commit: (clean)
181 commit: (clean)
182 update: 3 new changesets, 4 branch heads (merge)
182 update: 3 new changesets, 4 branch heads (merge)
183 phases: 6 draft
183 phases: 6 draft
184 remote: 3 outgoing
184 remote: 3 outgoing
185
185
186 check that various commands work well with filtering
186 check that various commands work well with filtering
187
187
188 $ hg tip
188 $ hg tip
189 5:5601fb93a350 (draft) [tip ] add new_3_c
189 5:5601fb93a350 (draft) [tip ] add new_3_c
190 $ hg log -r 6
190 $ hg log -r 6
191 abort: unknown revision '6'!
191 abort: unknown revision '6'!
192 [255]
192 [255]
193 $ hg log -r 4
193 $ hg log -r 4
194 abort: hidden revision '4'!
194 abort: hidden revision '4'!
195 (use --hidden to access hidden revisions)
195 (use --hidden to access hidden revisions)
196 [255]
196 [255]
197 $ hg debugrevspec 'rev(6)'
197 $ hg debugrevspec 'rev(6)'
198 $ hg debugrevspec 'rev(4)'
198 $ hg debugrevspec 'rev(4)'
199 $ hg debugrevspec 'null'
199 $ hg debugrevspec 'null'
200 -1
200 -1
201
201
202 Check that public changeset are not accounted as obsolete:
202 Check that public changeset are not accounted as obsolete:
203
203
204 $ hg --hidden phase --public 2
204 $ hg --hidden phase --public 2
205 $ hg log -G
205 $ hg log -G
206 @ 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
206 @ 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
207 |
207 |
208 | o 2:245bde4270cd (public) [ ] add original_c
208 | o 2:245bde4270cd (public) [ ] add original_c
209 |/
209 |/
210 o 1:7c3bad9141dc (public) [ ] add b
210 o 1:7c3bad9141dc (public) [ ] add b
211 |
211 |
212 o 0:1f0dee641bb7 (public) [ ] add a
212 o 0:1f0dee641bb7 (public) [ ] add a
213
213
214
214
215 And that bumped changesets are detected
215 And that bumped changesets are detected
216 --------------------------------------
216 --------------------------------------
217
217
218 If we didn't filter obsolete changesets out, 3 and 4 would show up too. Also
218 If we didn't filter obsolete changesets out, 3 and 4 would show up too. Also
219 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
219 note that the bumped changeset (5:5601fb93a350) is not a direct successor of
220 the public changeset
220 the public changeset
221
221
222 $ hg log --hidden -r 'bumped()'
222 $ hg log --hidden -r 'bumped()'
223 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
223 5:5601fb93a350 (draft bumped) [tip ] add new_3_c
224
224
225 And that we can't push a bumped changeset
225 And that we can't push a bumped changeset
226
226
227 $ hg push ../tmpa -r 0 --force #(make repo related)
227 $ hg push ../tmpa -r 0 --force #(make repo related)
228 pushing to ../tmpa
228 pushing to ../tmpa
229 searching for changes
229 searching for changes
230 warning: repository is unrelated
230 warning: repository is unrelated
231 adding changesets
231 adding changesets
232 adding manifests
232 adding manifests
233 adding file changes
233 adding file changes
234 added 1 changesets with 1 changes to 1 files (+1 heads)
234 added 1 changesets with 1 changes to 1 files (+1 heads)
235 $ hg push ../tmpa
235 $ hg push ../tmpa
236 pushing to ../tmpa
236 pushing to ../tmpa
237 searching for changes
237 searching for changes
238 abort: push includes bumped changeset: 5601fb93a350!
238 abort: push includes bumped changeset: 5601fb93a350!
239 [255]
239 [255]
240
240
241 Fixing the "bumped" situation
241 Fixing the "bumped" situation
242 We need to create a clone of 5 and add a special marker with a flag
242 We need to create a clone of 5 and add a special marker with a flag
243
243
244 $ hg summary
244 $ hg summary
245 parent: 5:5601fb93a350 tip (bumped)
245 parent: 5:5601fb93a350 tip (bumped)
246 add new_3_c
246 add new_3_c
247 branch: default
247 branch: default
248 commit: (clean)
248 commit: (clean)
249 update: 1 new changesets, 2 branch heads (merge)
249 update: 1 new changesets, 2 branch heads (merge)
250 phases: 1 draft
250 phases: 1 draft
251 bumped: 1 changesets
251 bumped: 1 changesets
252 $ hg up '5^'
252 $ hg up '5^'
253 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
253 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
254 $ hg revert -ar 5
254 $ hg revert -ar 5
255 adding new_3_c
255 adding new_3_c
256 $ hg ci -m 'add n3w_3_c'
256 $ hg ci -m 'add n3w_3_c'
257 created new head
257 created new head
258 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
258 $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c`
259 $ hg log -r 'bumped()'
259 $ hg log -r 'bumped()'
260 $ hg log -G
260 $ hg log -G
261 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
261 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
262 |
262 |
263 | o 2:245bde4270cd (public) [ ] add original_c
263 | o 2:245bde4270cd (public) [ ] add original_c
264 |/
264 |/
265 o 1:7c3bad9141dc (public) [ ] add b
265 o 1:7c3bad9141dc (public) [ ] add b
266 |
266 |
267 o 0:1f0dee641bb7 (public) [ ] add a
267 o 0:1f0dee641bb7 (public) [ ] add a
268
268
269
269
270 Basic exclusive testing
271
272 $ hg log -G --hidden
273 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
274 |
275 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c
276 |/
277 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
278 |/
279 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
280 |/
281 | o 2:245bde4270cd (public) [ ] add original_c
282 |/
283 o 1:7c3bad9141dc (public) [ ] add b
284 |
285 o 0:1f0dee641bb7 (public) [ ] add a
286
287 $ hg debugobsolete --rev 6f9641995072
288 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
289 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
290 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
291 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
292 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
293 $ hg debugobsolete --rev 6f9641995072 --exclusive
294 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
295 $ hg debugobsolete --rev 5601fb93a350 --hidden
296 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
297 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
298 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
299 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
300 $ hg debugobsolete --rev 5601fb93a350 --hidden --exclusive
301 $ hg debugobsolete --rev 5601fb93a350+6f9641995072 --hidden --exclusive
302 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
303 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
304 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
305
270 $ cd ..
306 $ cd ..
271
307
272 Revision 0 is hidden
308 Revision 0 is hidden
273 --------------------
309 --------------------
274
310
275 $ hg init rev0hidden
311 $ hg init rev0hidden
276 $ cd rev0hidden
312 $ cd rev0hidden
277
313
278 $ mkcommit kill0
314 $ mkcommit kill0
279 $ hg up -q null
315 $ hg up -q null
280 $ hg debugobsolete `getid kill0`
316 $ hg debugobsolete `getid kill0`
281 $ mkcommit a
317 $ mkcommit a
282 $ mkcommit b
318 $ mkcommit b
283
319
284 Should pick the first visible revision as "repo" node
320 Should pick the first visible revision as "repo" node
285
321
286 $ hg archive ../archive-null
322 $ hg archive ../archive-null
287 $ cat ../archive-null/.hg_archival.txt
323 $ cat ../archive-null/.hg_archival.txt
288 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
324 repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e
289 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
325 node: 7c3bad9141dcb46ff89abf5f61856facd56e476c
290 branch: default
326 branch: default
291 latesttag: null
327 latesttag: null
292 latesttagdistance: 2
328 latesttagdistance: 2
293 changessincelatesttag: 2
329 changessincelatesttag: 2
294
330
295
331
296 $ cd ..
332 $ cd ..
297
333
298 Exchange Test
334 Exchange Test
299 ============================
335 ============================
300
336
301 Destination repo does not have any data
337 Destination repo does not have any data
302 ---------------------------------------
338 ---------------------------------------
303
339
304 Simple incoming test
340 Simple incoming test
305
341
306 $ hg init tmpc
342 $ hg init tmpc
307 $ cd tmpc
343 $ cd tmpc
308 $ hg incoming ../tmpb
344 $ hg incoming ../tmpb
309 comparing with ../tmpb
345 comparing with ../tmpb
310 0:1f0dee641bb7 (public) [ ] add a
346 0:1f0dee641bb7 (public) [ ] add a
311 1:7c3bad9141dc (public) [ ] add b
347 1:7c3bad9141dc (public) [ ] add b
312 2:245bde4270cd (public) [ ] add original_c
348 2:245bde4270cd (public) [ ] add original_c
313 6:6f9641995072 (draft) [tip ] add n3w_3_c
349 6:6f9641995072 (draft) [tip ] add n3w_3_c
314
350
315 Try to pull markers
351 Try to pull markers
316 (extinct changesets are excluded but markers are still exchanged)
352 (extinct changesets are excluded but markers are still exchanged)
317
353
318 $ hg pull ../tmpb
354 $ hg pull ../tmpb
319 pulling from ../tmpb
355 pulling from ../tmpb
320 requesting all changes
356 requesting all changes
321 adding changesets
357 adding changesets
322 adding manifests
358 adding manifests
323 adding file changes
359 adding file changes
324 added 4 changesets with 4 changes to 4 files (+1 heads)
360 added 4 changesets with 4 changes to 4 files (+1 heads)
325 5 new obsolescence markers
361 5 new obsolescence markers
326 (run 'hg heads' to see heads, 'hg merge' to merge)
362 (run 'hg heads' to see heads, 'hg merge' to merge)
327 $ hg debugobsolete
363 $ hg debugobsolete
328 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
364 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
329 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
365 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
330 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
366 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
331 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
367 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
332 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
368 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
333
369
334 Rollback / Transaction support
370 Rollback / Transaction support
335
371
336 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
372 $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
337 $ hg debugobsolete
373 $ hg debugobsolete
338 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
374 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
339 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
375 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
340 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
376 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
341 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
377 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
342 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
378 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
343 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
379 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'}
344 $ hg rollback -n
380 $ hg rollback -n
345 repository tip rolled back to revision 3 (undo debugobsolete)
381 repository tip rolled back to revision 3 (undo debugobsolete)
346 $ hg rollback
382 $ hg rollback
347 repository tip rolled back to revision 3 (undo debugobsolete)
383 repository tip rolled back to revision 3 (undo debugobsolete)
348 $ hg debugobsolete
384 $ hg debugobsolete
349 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
385 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
350 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
386 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
351 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
387 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
352 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
388 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
353 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
389 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
354
390
355 $ cd ..
391 $ cd ..
356
392
357 Try to push markers
393 Try to push markers
358
394
359 $ hg init tmpd
395 $ hg init tmpd
360 $ hg -R tmpb push tmpd
396 $ hg -R tmpb push tmpd
361 pushing to tmpd
397 pushing to tmpd
362 searching for changes
398 searching for changes
363 adding changesets
399 adding changesets
364 adding manifests
400 adding manifests
365 adding file changes
401 adding file changes
366 added 4 changesets with 4 changes to 4 files (+1 heads)
402 added 4 changesets with 4 changes to 4 files (+1 heads)
367 5 new obsolescence markers
403 5 new obsolescence markers
368 $ hg -R tmpd debugobsolete | sort
404 $ hg -R tmpd debugobsolete | sort
369 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
405 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
370 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
406 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
371 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
407 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
372 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
408 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
373 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
409 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
374
410
375 Check that obsolete keys are exchanged only if the source has an obsolete store
411 Check that obsolete keys are exchanged only if the source has an obsolete store
376
412
377 $ hg init empty
413 $ hg init empty
378 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
414 $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd
379 pushing to tmpd
415 pushing to tmpd
380 listkeys phases
416 listkeys phases
381 listkeys bookmarks
417 listkeys bookmarks
382 no changes found
418 no changes found
383 listkeys phases
419 listkeys phases
384 [1]
420 [1]
385
421
386 clone support
422 clone support
387 (markers are copied and extinct changesets are included to allow hardlinks)
423 (markers are copied and extinct changesets are included to allow hardlinks)
388
424
389 $ hg clone tmpb clone-dest
425 $ hg clone tmpb clone-dest
390 updating to branch default
426 updating to branch default
391 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
427 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
392 $ hg -R clone-dest log -G --hidden
428 $ hg -R clone-dest log -G --hidden
393 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
429 @ 6:6f9641995072 (draft) [tip ] add n3w_3_c
394 |
430 |
395 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c
431 | x 5:5601fb93a350 (draft *obsolete*) [ ] add new_3_c
396 |/
432 |/
397 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
433 | x 4:ca819180edb9 (draft *obsolete*) [ ] add new_2_c
398 |/
434 |/
399 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
435 | x 3:cdbce2fbb163 (draft *obsolete*) [ ] add new_c
400 |/
436 |/
401 | o 2:245bde4270cd (public) [ ] add original_c
437 | o 2:245bde4270cd (public) [ ] add original_c
402 |/
438 |/
403 o 1:7c3bad9141dc (public) [ ] add b
439 o 1:7c3bad9141dc (public) [ ] add b
404 |
440 |
405 o 0:1f0dee641bb7 (public) [ ] add a
441 o 0:1f0dee641bb7 (public) [ ] add a
406
442
407 $ hg -R clone-dest debugobsolete
443 $ hg -R clone-dest debugobsolete
408 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
444 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
409 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
445 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
410 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
446 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
411 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
447 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
412 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
448 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
413
449
414
450
415 Destination repo has existing data
451 Destination repo has existing data
416 ---------------------------------------
452 ---------------------------------------
417
453
418 On pull
454 On pull
419
455
420 $ hg init tmpe
456 $ hg init tmpe
421 $ cd tmpe
457 $ cd tmpe
422 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
458 $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00
423 $ hg pull ../tmpb
459 $ hg pull ../tmpb
424 pulling from ../tmpb
460 pulling from ../tmpb
425 requesting all changes
461 requesting all changes
426 adding changesets
462 adding changesets
427 adding manifests
463 adding manifests
428 adding file changes
464 adding file changes
429 added 4 changesets with 4 changes to 4 files (+1 heads)
465 added 4 changesets with 4 changes to 4 files (+1 heads)
430 5 new obsolescence markers
466 5 new obsolescence markers
431 (run 'hg heads' to see heads, 'hg merge' to merge)
467 (run 'hg heads' to see heads, 'hg merge' to merge)
432 $ hg debugobsolete
468 $ hg debugobsolete
433 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
469 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
434 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
470 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
435 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
471 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
436 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
472 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
437 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
473 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
438 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
474 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
439
475
440
476
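The '5 new obsolescence markers' line only counts markers the receiving store did not already contain; the marker recorded directly in tmpe beforehand stays and the pulled ones are added next to it, which is why the dump above has six entries. The store effectively behaves as an append-only set keyed by the whole marker. A toy model of that behaviour (hashable tuples standing in for real markers, not the obsstore API):

    def add_markers(store, incoming):
        """Add incoming markers to the store; return how many were new.

        Markers are modelled as hashable tuples, e.g.
        (precursor, successors, flags); anything already present is ignored,
        which is what makes repeated exchange of the same markers harmless.
        """
        new = set(incoming) - store
        store.update(new)
        return len(new)

    store = {("1339...", ("ca81...",), 0)}            # toy marker already known
    pulled = [("1339...", ("ca81...",), 0),           # already known: not counted
              ("1337...", ("5601...",), 0)]           # genuinely new
    assert add_markers(store, pulled) == 1
    assert len(store) == 2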
441 On push
477 On push
442
478
443 $ hg push ../tmpc
479 $ hg push ../tmpc
444 pushing to ../tmpc
480 pushing to ../tmpc
445 searching for changes
481 searching for changes
446 no changes found
482 no changes found
447 1 new obsolescence markers
483 1 new obsolescence markers
448 [1]
484 [1]
449 $ hg -R ../tmpc debugobsolete
485 $ hg -R ../tmpc debugobsolete
450 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
486 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
451 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
487 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
452 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
488 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
453 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
489 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
454 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
490 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
455 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
491 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
456
492
457 detect outgoing obsolete and unstable
493 detect outgoing obsolete and unstable
458 ---------------------------------------
494 ---------------------------------------
459
495
460
496
461 $ hg log -G
497 $ hg log -G
462 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
498 o 3:6f9641995072 (draft) [tip ] add n3w_3_c
463 |
499 |
464 | o 2:245bde4270cd (public) [ ] add original_c
500 | o 2:245bde4270cd (public) [ ] add original_c
465 |/
501 |/
466 o 1:7c3bad9141dc (public) [ ] add b
502 o 1:7c3bad9141dc (public) [ ] add b
467 |
503 |
468 o 0:1f0dee641bb7 (public) [ ] add a
504 o 0:1f0dee641bb7 (public) [ ] add a
469
505
470 $ hg up 'desc("n3w_3_c")'
506 $ hg up 'desc("n3w_3_c")'
471 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
507 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
472 $ mkcommit original_d
508 $ mkcommit original_d
473 $ mkcommit original_e
509 $ mkcommit original_e
474 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
510 $ hg debugobsolete --record-parents `getid original_d` -d '0 0'
475 $ hg debugobsolete | grep `getid original_d`
511 $ hg debugobsolete | grep `getid original_d`
476 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
512 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
477 $ hg log -r 'obsolete()'
513 $ hg log -r 'obsolete()'
478 4:94b33453f93b (draft *obsolete*) [ ] add original_d
514 4:94b33453f93b (draft *obsolete*) [ ] add original_d
479 $ hg summary
515 $ hg summary
480 parent: 5:cda648ca50f5 tip (unstable)
516 parent: 5:cda648ca50f5 tip (unstable)
481 add original_e
517 add original_e
482 branch: default
518 branch: default
483 commit: (clean)
519 commit: (clean)
484 update: 1 new changesets, 2 branch heads (merge)
520 update: 1 new changesets, 2 branch heads (merge)
485 phases: 3 draft
521 phases: 3 draft
486 unstable: 1 changesets
522 unstable: 1 changesets
487 $ hg log -G -r '::unstable()'
523 $ hg log -G -r '::unstable()'
488 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
524 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
489 |
525 |
490 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
526 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
491 |
527 |
492 o 3:6f9641995072 (draft) [ ] add n3w_3_c
528 o 3:6f9641995072 (draft) [ ] add n3w_3_c
493 |
529 |
494 o 1:7c3bad9141dc (public) [ ] add b
530 o 1:7c3bad9141dc (public) [ ] add b
495 |
531 |
496 o 0:1f0dee641bb7 (public) [ ] add a
532 o 0:1f0dee641bb7 (public) [ ] add a
497
533
498
534
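The 'unstable: 1 changesets' entry in the summary above is cda648ca50f5: it is not obsolete itself, but its parent 94b33453f93b is. Roughly, 'unstable' means a non-obsolete changeset with at least one obsolete ancestor. A sketch of that rule over a plain parent map (phases are ignored here, so this is illustrative only, not the revset implementation):

    def unstable(revs, parents, obsolete):
        """Non-obsolete revisions that have at least one obsolete ancestor.

        revs     -- iterable of revision numbers
        parents  -- dict: rev -> list of parent revs
        obsolete -- set of obsolete revs
        """
        tainted = set(obsolete)
        for rev in sorted(revs):              # parents come before children
            if rev in obsolete:
                continue
            if any(p in tainted for p in parents.get(rev, [])):
                tainted.add(rev)
        return tainted - obsolete

    # Toy version of the graph above: 4 is obsolete, 5 sits on top of it.
    parents = {0: [], 1: [0], 2: [1], 3: [1], 4: [3], 5: [4]}
    assert unstable(range(6), parents, {4}) == {5}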
499 refuse to push obsolete changeset
535 refuse to push obsolete changeset
500
536
501 $ hg push ../tmpc/ -r 'desc("original_d")'
537 $ hg push ../tmpc/ -r 'desc("original_d")'
502 pushing to ../tmpc/
538 pushing to ../tmpc/
503 searching for changes
539 searching for changes
504 abort: push includes obsolete changeset: 94b33453f93b!
540 abort: push includes obsolete changeset: 94b33453f93b!
505 [255]
541 [255]
506
542
507 refuse to push unstable changeset
543 refuse to push unstable changeset
508
544
509 $ hg push ../tmpc/
545 $ hg push ../tmpc/
510 pushing to ../tmpc/
546 pushing to ../tmpc/
511 searching for changes
547 searching for changes
512 abort: push includes unstable changeset: cda648ca50f5!
548 abort: push includes unstable changeset: cda648ca50f5!
513 [255]
549 [255]
514
550
515 Test that extinct changesets are properly detected
551 Test that extinct changesets are properly detected
516
552
517 $ hg log -r 'extinct()'
553 $ hg log -r 'extinct()'
518
554
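An empty `extinct()` is expected at this point: extinct changesets are obsolete changesets whose descendants (themselves included) are all obsolete, and the only obsolete changeset here, 94b33453f93b, still has the live child cda648ca50f5. A hedged sketch of the check, again over a toy parent map:

    def extinct(revs, parents, obsolete):
        """Obsolete revisions with no non-obsolete descendant."""
        children = {}
        for rev in revs:
            for p in parents.get(rev, []):
                children.setdefault(p, []).append(rev)

        def has_live_descendant(rev):
            stack = list(children.get(rev, []))
            while stack:
                r = stack.pop()
                if r not in obsolete:
                    return True
                stack.extend(children.get(r, []))
            return False

        return {r for r in obsolete if not has_live_descendant(r)}

    parents = {0: [], 1: [0], 2: [1], 3: [1], 4: [3], 5: [4]}
    assert extinct(range(6), parents, {4}) == set()       # 5 keeps 4 relevant
    assert extinct(range(6), parents, {4, 5}) == {4, 5}   # whole chain obsolete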
519 Don't try to push extinct changesets
555 Don't try to push extinct changesets
520
556
521 $ hg init ../tmpf
557 $ hg init ../tmpf
522 $ hg out ../tmpf
558 $ hg out ../tmpf
523 comparing with ../tmpf
559 comparing with ../tmpf
524 searching for changes
560 searching for changes
525 0:1f0dee641bb7 (public) [ ] add a
561 0:1f0dee641bb7 (public) [ ] add a
526 1:7c3bad9141dc (public) [ ] add b
562 1:7c3bad9141dc (public) [ ] add b
527 2:245bde4270cd (public) [ ] add original_c
563 2:245bde4270cd (public) [ ] add original_c
528 3:6f9641995072 (draft) [ ] add n3w_3_c
564 3:6f9641995072 (draft) [ ] add n3w_3_c
529 4:94b33453f93b (draft *obsolete*) [ ] add original_d
565 4:94b33453f93b (draft *obsolete*) [ ] add original_d
530 5:cda648ca50f5 (draft unstable) [tip ] add original_e
566 5:cda648ca50f5 (draft unstable) [tip ] add original_e
531 $ hg push ../tmpf -f # -f because we push unstable too
567 $ hg push ../tmpf -f # -f because we push unstable too
532 pushing to ../tmpf
568 pushing to ../tmpf
533 searching for changes
569 searching for changes
534 adding changesets
570 adding changesets
535 adding manifests
571 adding manifests
536 adding file changes
572 adding file changes
537 added 6 changesets with 6 changes to 6 files (+1 heads)
573 added 6 changesets with 6 changes to 6 files (+1 heads)
538 7 new obsolescence markers
574 7 new obsolescence markers
539
575
540 no warning displayed
576 no warning displayed
541
577
542 $ hg push ../tmpf
578 $ hg push ../tmpf
543 pushing to ../tmpf
579 pushing to ../tmpf
544 searching for changes
580 searching for changes
545 no changes found
581 no changes found
546 [1]
582 [1]
547
583
548 Do not warn about new head when the new head is a successor of a remote one
584 Do not warn about new head when the new head is a successor of a remote one
549
585
550 $ hg log -G
586 $ hg log -G
551 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
587 @ 5:cda648ca50f5 (draft unstable) [tip ] add original_e
552 |
588 |
553 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
589 x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
554 |
590 |
555 o 3:6f9641995072 (draft) [ ] add n3w_3_c
591 o 3:6f9641995072 (draft) [ ] add n3w_3_c
556 |
592 |
557 | o 2:245bde4270cd (public) [ ] add original_c
593 | o 2:245bde4270cd (public) [ ] add original_c
558 |/
594 |/
559 o 1:7c3bad9141dc (public) [ ] add b
595 o 1:7c3bad9141dc (public) [ ] add b
560 |
596 |
561 o 0:1f0dee641bb7 (public) [ ] add a
597 o 0:1f0dee641bb7 (public) [ ] add a
562
598
563 $ hg up -q 'desc(n3w_3_c)'
599 $ hg up -q 'desc(n3w_3_c)'
564 $ mkcommit obsolete_e
600 $ mkcommit obsolete_e
565 created new head
601 created new head
566 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
602 $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'`
567 $ hg outgoing ../tmpf # parasitic hg outgoing testing
603 $ hg outgoing ../tmpf # parasitic hg outgoing testing
568 comparing with ../tmpf
604 comparing with ../tmpf
569 searching for changes
605 searching for changes
570 6:3de5eca88c00 (draft) [tip ] add obsolete_e
606 6:3de5eca88c00 (draft) [tip ] add obsolete_e
571 $ hg push ../tmpf
607 $ hg push ../tmpf
572 pushing to ../tmpf
608 pushing to ../tmpf
573 searching for changes
609 searching for changes
574 adding changesets
610 adding changesets
575 adding manifests
611 adding manifests
576 adding file changes
612 adding file changes
577 added 1 changesets with 1 changes to 1 files (+1 heads)
613 added 1 changesets with 1 changes to 1 files (+1 heads)
578 1 new obsolescence markers
614 1 new obsolescence markers
579
615
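The push above adds a new head on the remote side yet does not trigger the usual 'push creates new remote head' complaint, because 3de5eca88c00 is recorded as the successor of cda648ca50f5, which ../tmpf already has as a head. A simplified version of that head check (direct marker successors only; the real code also follows marker chains):

    def unexplained_new_heads(candidate_heads, remote_heads, successors_of):
        """Candidate heads that do not replace a head the remote already has.

        successors_of -- dict: node -> set of direct marker successors
        """
        allowed = set()
        for old in remote_heads:
            allowed |= successors_of.get(old, set())
        return [h for h in candidate_heads if h not in allowed]

    # obsolete_e replaces original_e, which ../tmpf already has as a head,
    # so the push above does not complain about creating a new remote head.
    assert unexplained_new_heads(
        {"3de5eca88c00"}, {"cda648ca50f5"},
        {"cda648ca50f5": {"3de5eca88c00"}}) == []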
580 test relevance computation
616 test relevance computation
581 ---------------------------------------
617 ---------------------------------------
582
618
583 Checking a simple case of "marker relevance".
619 Checking a simple case of "marker relevance".
584
620
585
621
586 Reminder of the repo situation
622 Reminder of the repo situation
587
623
588 $ hg log --hidden --graph
624 $ hg log --hidden --graph
589 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
625 @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e
590 |
626 |
591 | x 5:cda648ca50f5 (draft *obsolete*) [ ] add original_e
627 | x 5:cda648ca50f5 (draft *obsolete*) [ ] add original_e
592 | |
628 | |
593 | x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
629 | x 4:94b33453f93b (draft *obsolete*) [ ] add original_d
594 |/
630 |/
595 o 3:6f9641995072 (draft) [ ] add n3w_3_c
631 o 3:6f9641995072 (draft) [ ] add n3w_3_c
596 |
632 |
597 | o 2:245bde4270cd (public) [ ] add original_c
633 | o 2:245bde4270cd (public) [ ] add original_c
598 |/
634 |/
599 o 1:7c3bad9141dc (public) [ ] add b
635 o 1:7c3bad9141dc (public) [ ] add b
600 |
636 |
601 o 0:1f0dee641bb7 (public) [ ] add a
637 o 0:1f0dee641bb7 (public) [ ] add a
602
638
603
639
604 List of all markers
640 List of all markers
605
641
606 $ hg debugobsolete
642 $ hg debugobsolete
607 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
643 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
608 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
644 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
609 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
645 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
610 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
646 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
611 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
647 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
612 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
648 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
613 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
649 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
614 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
650 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
615
651
616 List of changesets with no chain
652 List of changesets with no chain
617
653
618 $ hg debugobsolete --hidden --rev ::2
654 $ hg debugobsolete --hidden --rev ::2
619
655
620 List of changesets that are included in a marker chain
656 List of changesets that are included in a marker chain
621
657
622 $ hg debugobsolete --hidden --rev 6
658 $ hg debugobsolete --hidden --rev 6
623 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
659 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
624
660
625 List of changesets with a longer chain (including a pruned child)
661 List of changesets with a longer chain (including a pruned child)
626
662
627 $ hg debugobsolete --hidden --rev 3
663 $ hg debugobsolete --hidden --rev 3
628 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
664 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
629 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
665 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
630 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
666 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
631 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
667 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
632 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
668 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
633 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
669 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
634 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
670 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
635
671
636 List of both
672 List of both
637
673
638 $ hg debugobsolete --hidden --rev 3::6
674 $ hg debugobsolete --hidden --rev 3::6
639 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
675 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
640 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
676 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'}
641 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
677 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'}
642 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
678 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
643 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
679 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
644 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
680 ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'}
645 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
681 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob)
646 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
682 cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'}
647
683
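The 'relevance' exercised here can be summarised as: starting from the requested revisions, keep every marker whose successors or recorded parents touch a node already known to be relevant, and add that marker's precursor to the walk. That is why rev 3 above drags in the whole 1337/1339 precursor chain plus the prune marker of its child, while `::2` yields nothing. A sketch of the walk (tuples as toy markers; no claim that this mirrors obsolete.py line for line):

    def relevant_markers(nodes, markers):
        """Markers that make up the obsolescence history of ``nodes``.

        markers -- iterable of (precursor, successors, parents) tuples, with
                   successors and parents given as tuples of node ids.
        A marker is kept when one of its successors or recorded parents is
        already known to be relevant; its precursor then joins the walk.
        """
        markers = list(markers)
        known = set(nodes)
        pending = set(nodes)
        kept = set()
        while pending:
            hits = [m for m in markers
                    if m not in kept and (set(m[1]) & pending
                                          or set(m[2]) & pending)]
            kept.update(hits)
            pending = {m[0] for m in hits} - known
            known |= pending
        return kept

    # Toy labels standing in for the real 40-character hashes above.
    chain = [("5601", ("6f96",), ()), ("1337", ("5601",), ()),
             ("94b3", (), ("6f96",))]          # last one is a prune marker
    assert len(relevant_markers({"6f96"}, chain)) == 3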
648 List of all markers in JSON
684 List of all markers in JSON
649
685
650 $ hg debugobsolete -Tjson
686 $ hg debugobsolete -Tjson
651 [
687 [
652 {
688 {
653 "date": [1339.0, 0],
689 "date": [1339.0, 0],
654 "flag": 0,
690 "flag": 0,
655 "metadata": {"user": "test"},
691 "metadata": {"user": "test"},
656 "precnode": "1339133913391339133913391339133913391339",
692 "precnode": "1339133913391339133913391339133913391339",
657 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
693 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
658 },
694 },
659 {
695 {
660 "date": [1339.0, 0],
696 "date": [1339.0, 0],
661 "flag": 0,
697 "flag": 0,
662 "metadata": {"user": "test"},
698 "metadata": {"user": "test"},
663 "precnode": "1337133713371337133713371337133713371337",
699 "precnode": "1337133713371337133713371337133713371337",
664 "succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
700 "succnodes": ["5601fb93a350734d935195fee37f4054c529ff39"]
665 },
701 },
666 {
702 {
667 "date": [121.0, 120],
703 "date": [121.0, 120],
668 "flag": 12,
704 "flag": 12,
669 "metadata": {"user": "test"},
705 "metadata": {"user": "test"},
670 "precnode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
706 "precnode": "245bde4270cd1072a27757984f9cda8ba26f08ca",
671 "succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
707 "succnodes": ["cdbce2fbb16313928851e97e0d85413f3f7eb77f"]
672 },
708 },
673 {
709 {
674 "date": [1338.0, 0],
710 "date": [1338.0, 0],
675 "flag": 1,
711 "flag": 1,
676 "metadata": {"user": "test"},
712 "metadata": {"user": "test"},
677 "precnode": "5601fb93a350734d935195fee37f4054c529ff39",
713 "precnode": "5601fb93a350734d935195fee37f4054c529ff39",
678 "succnodes": ["6f96419950729f3671185b847352890f074f7557"]
714 "succnodes": ["6f96419950729f3671185b847352890f074f7557"]
679 },
715 },
680 {
716 {
681 "date": [1338.0, 0],
717 "date": [1338.0, 0],
682 "flag": 0,
718 "flag": 0,
683 "metadata": {"user": "test"},
719 "metadata": {"user": "test"},
684 "precnode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
720 "precnode": "ca819180edb99ed25ceafb3e9584ac287e240b00",
685 "succnodes": ["1337133713371337133713371337133713371337"]
721 "succnodes": ["1337133713371337133713371337133713371337"]
686 },
722 },
687 {
723 {
688 "date": [1337.0, 0],
724 "date": [1337.0, 0],
689 "flag": 0,
725 "flag": 0,
690 "metadata": {"user": "test"},
726 "metadata": {"user": "test"},
691 "precnode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
727 "precnode": "cdbce2fbb16313928851e97e0d85413f3f7eb77f",
692 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
728 "succnodes": ["ca819180edb99ed25ceafb3e9584ac287e240b00"]
693 },
729 },
694 {
730 {
695 "date": [0.0, 0],
731 "date": [0.0, 0],
696 "flag": 0,
732 "flag": 0,
697 "metadata": {"user": "test"},
733 "metadata": {"user": "test"},
698 "parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
734 "parentnodes": ["6f96419950729f3671185b847352890f074f7557"],
699 "precnode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
735 "precnode": "94b33453f93bdb8d457ef9b770851a618bf413e1",
700 "succnodes": []
736 "succnodes": []
701 },
737 },
702 {
738 {
703 "date": *, (glob)
739 "date": *, (glob)
704 "flag": 0,
740 "flag": 0,
705 "metadata": {"user": "test"},
741 "metadata": {"user": "test"},
706 "precnode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
742 "precnode": "cda648ca50f50482b7055c0b0c4c117bba6733d9",
707 "succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
743 "succnodes": ["3de5eca88c00aa039da7399a220f4a5221faa585"]
708 }
744 }
709 ]
745 ]
710
746
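Because `-Tjson` emits a well-formed JSON list, the same data is easy to post-process with ordinary JSON tooling. A small illustration (field names taken from the output above; assumes an `hg` executable and a repository in the current directory):

    import json
    import subprocess

    out = subprocess.check_output(["hg", "debugobsolete", "-Tjson"])
    markers = json.loads(out)

    # Field names as printed above: precnode, succnodes, parentnodes, flag, ...
    replacements = {m["precnode"]: m.get("succnodes", []) for m in markers}
    pruned = [prec for prec, succs in replacements.items() if not succs]
    print("%d markers, %d of them prune markers" % (len(markers), len(pruned)))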
711 Template keywords
747 Template keywords
712
748
713 $ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
749 $ hg debugobsolete -r6 -T '{succnodes % "{node|short}"} {date|shortdate}\n'
714 3de5eca88c00 ????-??-?? (glob)
750 3de5eca88c00 ????-??-?? (glob)
715 $ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
751 $ hg debugobsolete -r6 -T '{join(metadata % "{key}={value}", " ")}\n'
716 user=test
752 user=test
717 $ hg debugobsolete -r6 -T '{metadata}\n'
753 $ hg debugobsolete -r6 -T '{metadata}\n'
718 'user': 'test'
754 'user': 'test'
719 $ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
755 $ hg debugobsolete -r6 -T '{flag} {get(metadata, "user")}\n'
720 0 test
756 0 test
721
757
722 Test the debug output for exchange
758 Test the debug output for exchange
723 ----------------------------------
759 ----------------------------------
724
760
725 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' # bundle2
761 $ hg pull ../tmpb --config 'experimental.obsmarkers-exchange-debug=True' # bundle2
726 pulling from ../tmpb
762 pulling from ../tmpb
727 searching for changes
763 searching for changes
728 no changes found
764 no changes found
729 obsmarker-exchange: 346 bytes received
765 obsmarker-exchange: 346 bytes received
730
766
731 check hgweb does not explode
767 check hgweb does not explode
732 ====================================
768 ====================================
733
769
734 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
770 $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg
735 adding changesets
771 adding changesets
736 adding manifests
772 adding manifests
737 adding file changes
773 adding file changes
738 added 62 changesets with 63 changes to 9 files (+60 heads)
774 added 62 changesets with 63 changes to 9 files (+60 heads)
739 (run 'hg heads .' to see heads, 'hg merge' to merge)
775 (run 'hg heads .' to see heads, 'hg merge' to merge)
740 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
776 $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`;
741 > do
777 > do
742 > hg debugobsolete $node
778 > hg debugobsolete $node
743 > done
779 > done
744 $ hg up tip
780 $ hg up tip
745 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
781 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
746
782
747 #if serve
783 #if serve
748
784
749 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
785 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
750 $ cat hg.pid >> $DAEMON_PIDS
786 $ cat hg.pid >> $DAEMON_PIDS
751
787
752 check changelog view
788 check changelog view
753
789
754 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
790 $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/'
755 200 Script output follows
791 200 Script output follows
756
792
757 check graph view
793 check graph view
758
794
759 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
795 $ get-with-headers.py --headeronly localhost:$HGPORT 'graph'
760 200 Script output follows
796 200 Script output follows
761
797
762 check filelog view
798 check filelog view
763
799
764 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
800 $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar'
765 200 Script output follows
801 200 Script output follows
766
802
767 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
803 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68'
768 200 Script output follows
804 200 Script output follows
769 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
805 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
770 404 Not Found
806 404 Not Found
771 [1]
807 [1]
772
808
773 check that the web.view config option is honored:
809 check that the web.view config option is honored:
774
810
775 $ killdaemons.py hg.pid
811 $ killdaemons.py hg.pid
776 $ cat >> .hg/hgrc << EOF
812 $ cat >> .hg/hgrc << EOF
777 > [web]
813 > [web]
778 > view=all
814 > view=all
779 > EOF
815 > EOF
780 $ wait
816 $ wait
781 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
817 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
782 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
818 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67'
783 200 Script output follows
819 200 Script output follows
784 $ killdaemons.py hg.pid
820 $ killdaemons.py hg.pid
785
821
786 Checking the _enable=False warning if obsolete markers exist
822 Checking the _enable=False warning if obsolete markers exist
787
823
788 $ echo '[experimental]' >> $HGRCPATH
824 $ echo '[experimental]' >> $HGRCPATH
789 $ echo "evolution=" >> $HGRCPATH
825 $ echo "evolution=" >> $HGRCPATH
790 $ hg log -r tip
826 $ hg log -r tip
791 obsolete feature not enabled but 68 markers found!
827 obsolete feature not enabled but 68 markers found!
792 68:c15e9edfca13 (draft) [tip ] add celestine
828 68:c15e9edfca13 (draft) [tip ] add celestine
793
829
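The warning above fires because the obsstore still holds 68 markers while the evolution config has just been emptied. The condition amounts to 'feature off, store non-empty'; a simplified stand-in, not Mercurial's actual helper:

    import sys

    def warn_if_markers_without_evolution(evolution_config, marker_count):
        # Reproduces the gist of the message seen above.
        if not evolution_config.strip() and marker_count:
            sys.stderr.write("obsolete feature not enabled but "
                             "%d markers found!\n" % marker_count)

    warn_if_markers_without_evolution("", 68)                        # warns
    warn_if_markers_without_evolution("createmarkers,exchange", 68)  # silent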
794 reenable for later test
830 reenable for later test
795
831
796 $ echo '[experimental]' >> $HGRCPATH
832 $ echo '[experimental]' >> $HGRCPATH
797 $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
833 $ echo "evolution=createmarkers,exchange" >> $HGRCPATH
798
834
799 $ rm hg.pid access.log errors.log
835 $ rm hg.pid access.log errors.log
800 #endif
836 #endif
801
837
802 Several troubles on the same changeset (create an unstable and bumped changeset)
838 Several troubles on the same changeset (create an unstable and bumped changeset)
803
839
804 $ hg debugobsolete `getid obsolete_e`
840 $ hg debugobsolete `getid obsolete_e`
805 $ hg debugobsolete `getid original_c` `getid babar`
841 $ hg debugobsolete `getid original_c` `getid babar`
806 $ hg log --config ui.logtemplate= -r 'bumped() and unstable()'
842 $ hg log --config ui.logtemplate= -r 'bumped() and unstable()'
807 changeset: 7:50c51b361e60
843 changeset: 7:50c51b361e60
808 user: test
844 user: test
809 date: Thu Jan 01 00:00:00 1970 +0000
845 date: Thu Jan 01 00:00:00 1970 +0000
810 trouble: unstable, bumped
846 trouble: unstable, bumped
811 summary: add babar
847 summary: add babar
812
848
813
849
814 test the "obsolete" templatekw
850 test the "obsolete" templatekw
815
851
816 $ hg log -r 'obsolete()'
852 $ hg log -r 'obsolete()'
817 6:3de5eca88c00 (draft *obsolete*) [ ] add obsolete_e
853 6:3de5eca88c00 (draft *obsolete*) [ ] add obsolete_e
818
854
819 test the "troubles" templatekw
855 test the "troubles" templatekw
820
856
821 $ hg log -r 'bumped() and unstable()'
857 $ hg log -r 'bumped() and unstable()'
822 7:50c51b361e60 (draft unstable bumped) [ ] add babar
858 7:50c51b361e60 (draft unstable bumped) [ ] add babar
823
859
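'bumped' (called phase-divergent in later releases) marks a non-obsolete changeset that the markers describe as the successor of a public changeset: the `debugobsolete` call above recorded babar as the successor of the public original_c. It is also unstable because one of its ancestors is now obsolete, hence both troubles on rev 7. A rough expression of the bumped rule (public set given explicitly, direct successors only):

    def bumped(public, obsolete, successors_of):
        """Non-obsolete revisions recorded as the successor of a public one.

        successors_of -- dict: precursor rev -> set of successor revs taken
        from the markers (the real rule also follows chains of markers).
        """
        result = set()
        for prec, succs in successors_of.items():
            if prec in public:
                result |= {s for s in succs if s not in obsolete}
        return result

    # Toy shape of the situation above: 2 (original_c) is public and 7 (babar)
    # has just been marked as its successor.
    assert bumped({0, 1, 2}, set(), {2: {7}}) == {7}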
824 test the default cmdline template
860 test the default cmdline template
825
861
826 $ hg log -T default -r 'bumped()'
862 $ hg log -T default -r 'bumped()'
827 changeset: 7:50c51b361e60
863 changeset: 7:50c51b361e60
828 user: test
864 user: test
829 date: Thu Jan 01 00:00:00 1970 +0000
865 date: Thu Jan 01 00:00:00 1970 +0000
830 trouble: unstable, bumped
866 trouble: unstable, bumped
831 summary: add babar
867 summary: add babar
832
868
833 $ hg log -T default -r 'obsolete()'
869 $ hg log -T default -r 'obsolete()'
834 changeset: 6:3de5eca88c00
870 changeset: 6:3de5eca88c00
835 parent: 3:6f9641995072
871 parent: 3:6f9641995072
836 user: test
872 user: test
837 date: Thu Jan 01 00:00:00 1970 +0000
873 date: Thu Jan 01 00:00:00 1970 +0000
838 summary: add obsolete_e
874 summary: add obsolete_e
839
875
840
876
841 test summary output
877 test summary output
842
878
843 $ hg up -r 'bumped() and unstable()'
879 $ hg up -r 'bumped() and unstable()'
844 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
880 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
845 $ hg summary
881 $ hg summary
846 parent: 7:50c51b361e60 (unstable, bumped)
882 parent: 7:50c51b361e60 (unstable, bumped)
847 add babar
883 add babar
848 branch: default
884 branch: default
849 commit: (clean)
885 commit: (clean)
850 update: 2 new changesets (update)
886 update: 2 new changesets (update)
851 phases: 4 draft
887 phases: 4 draft
852 unstable: 2 changesets
888 unstable: 2 changesets
853 bumped: 1 changesets
889 bumped: 1 changesets
854 $ hg up -r 'obsolete()'
890 $ hg up -r 'obsolete()'
855 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
891 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
856 $ hg summary
892 $ hg summary
857 parent: 6:3de5eca88c00 (obsolete)
893 parent: 6:3de5eca88c00 (obsolete)
858 add obsolete_e
894 add obsolete_e
859 branch: default
895 branch: default
860 commit: (clean)
896 commit: (clean)
861 update: 3 new changesets (update)
897 update: 3 new changesets (update)
862 phases: 4 draft
898 phases: 4 draft
863 unstable: 2 changesets
899 unstable: 2 changesets
864 bumped: 1 changesets
900 bumped: 1 changesets
865
901
866 Test incoming/outgoing with changesets obsoleted remotely, known locally
902 Test incoming/outgoing with changesets obsoleted remotely, known locally
867 ===============================================================================
903 ===============================================================================
868
904
869 This tests issue 3805
905 This tests issue 3805
870
906
871 $ hg init repo-issue3805
907 $ hg init repo-issue3805
872 $ cd repo-issue3805
908 $ cd repo-issue3805
873 $ echo "base" > base
909 $ echo "base" > base
874 $ hg ci -Am "base"
910 $ hg ci -Am "base"
875 adding base
911 adding base
876 $ echo "foo" > foo
912 $ echo "foo" > foo
877 $ hg ci -Am "A"
913 $ hg ci -Am "A"
878 adding foo
914 adding foo
879 $ hg clone . ../other-issue3805
915 $ hg clone . ../other-issue3805
880 updating to branch default
916 updating to branch default
881 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
917 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
882 $ echo "bar" >> foo
918 $ echo "bar" >> foo
883 $ hg ci --amend
919 $ hg ci --amend
884 $ cd ../other-issue3805
920 $ cd ../other-issue3805
885 $ hg log -G
921 $ hg log -G
886 @ 1:29f0c6921ddd (draft) [tip ] A
922 @ 1:29f0c6921ddd (draft) [tip ] A
887 |
923 |
888 o 0:d20a80d4def3 (draft) [ ] base
924 o 0:d20a80d4def3 (draft) [ ] base
889
925
890 $ hg log -G -R ../repo-issue3805
926 $ hg log -G -R ../repo-issue3805
891 @ 3:323a9c3ddd91 (draft) [tip ] A
927 @ 3:323a9c3ddd91 (draft) [tip ] A
892 |
928 |
893 o 0:d20a80d4def3 (draft) [ ] base
929 o 0:d20a80d4def3 (draft) [ ] base
894
930
895 $ hg incoming
931 $ hg incoming
896 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
932 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
897 searching for changes
933 searching for changes
898 3:323a9c3ddd91 (draft) [tip ] A
934 3:323a9c3ddd91 (draft) [tip ] A
899 $ hg incoming --bundle ../issue3805.hg
935 $ hg incoming --bundle ../issue3805.hg
900 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
936 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
901 searching for changes
937 searching for changes
902 3:323a9c3ddd91 (draft) [tip ] A
938 3:323a9c3ddd91 (draft) [tip ] A
903 $ hg outgoing
939 $ hg outgoing
904 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
940 comparing with $TESTTMP/tmpe/repo-issue3805 (glob)
905 searching for changes
941 searching for changes
906 1:29f0c6921ddd (draft) [tip ] A
942 1:29f0c6921ddd (draft) [tip ] A
907
943
908 #if serve
944 #if serve
909
945
910 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
946 $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
911 $ cat hg.pid >> $DAEMON_PIDS
947 $ cat hg.pid >> $DAEMON_PIDS
912
948
913 $ hg incoming http://localhost:$HGPORT
949 $ hg incoming http://localhost:$HGPORT
914 comparing with http://localhost:$HGPORT/
950 comparing with http://localhost:$HGPORT/
915 searching for changes
951 searching for changes
916 2:323a9c3ddd91 (draft) [tip ] A
952 2:323a9c3ddd91 (draft) [tip ] A
917 $ hg outgoing http://localhost:$HGPORT
953 $ hg outgoing http://localhost:$HGPORT
918 comparing with http://localhost:$HGPORT/
954 comparing with http://localhost:$HGPORT/
919 searching for changes
955 searching for changes
920 1:29f0c6921ddd (draft) [tip ] A
956 1:29f0c6921ddd (draft) [tip ] A
921
957
922 $ killdaemons.py
958 $ killdaemons.py
923
959
924 #endif
960 #endif
925
961
926 This tests issue 3814
962 This tests issue 3814
927
963
928 (nothing to push but locally hidden changeset)
964 (nothing to push but locally hidden changeset)
929
965
930 $ cd ..
966 $ cd ..
931 $ hg init repo-issue3814
967 $ hg init repo-issue3814
932 $ cd repo-issue3805
968 $ cd repo-issue3805
933 $ hg push -r 323a9c3ddd91 ../repo-issue3814
969 $ hg push -r 323a9c3ddd91 ../repo-issue3814
934 pushing to ../repo-issue3814
970 pushing to ../repo-issue3814
935 searching for changes
971 searching for changes
936 adding changesets
972 adding changesets
937 adding manifests
973 adding manifests
938 adding file changes
974 adding file changes
939 added 2 changesets with 2 changes to 2 files
975 added 2 changesets with 2 changes to 2 files
940 2 new obsolescence markers
976 2 new obsolescence markers
941 $ hg out ../repo-issue3814
977 $ hg out ../repo-issue3814
942 comparing with ../repo-issue3814
978 comparing with ../repo-issue3814
943 searching for changes
979 searching for changes
944 no changes found
980 no changes found
945 [1]
981 [1]
946
982
947 Test that a local tag blocks a changeset from being hidden
983 Test that a local tag blocks a changeset from being hidden
948
984
949 $ hg tag -l visible -r 1 --hidden
985 $ hg tag -l visible -r 1 --hidden
950 $ hg log -G
986 $ hg log -G
951 @ 3:323a9c3ddd91 (draft) [tip ] A
987 @ 3:323a9c3ddd91 (draft) [tip ] A
952 |
988 |
953 | x 1:29f0c6921ddd (draft *obsolete*) [visible ] A
989 | x 1:29f0c6921ddd (draft *obsolete*) [visible ] A
954 |/
990 |/
955 o 0:d20a80d4def3 (draft) [ ] base
991 o 0:d20a80d4def3 (draft) [ ] base
956
992
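What keeps 29f0c6921ddd listed here is that hiding is a subtraction: obsolete changesets are hidden unless they are ancestors (themselves included) of something that must stay visible, and local tags, bookmarks and working-directory parents all pin revisions into that visible set. A compact sketch of the idea, over a toy parent map rather than the real repoview machinery:

    def hidden_revs(revs, parents, obsolete, pinned):
        """Obsolete revisions not needed to explain anything still visible.

        A revision stays visible when it is an ancestor (itself included) of
        a non-obsolete revision or of a pinned one (local tags, bookmarks,
        working-directory parents).
        """
        roots = (set(revs) - obsolete) | set(pinned)
        visible = set()
        stack = list(roots)
        while stack:
            r = stack.pop()
            if r in visible:
                continue
            visible.add(r)
            stack.extend(parents.get(r, []))
        return obsolete - visible

    # Shape of this test: 1 is obsolete, 3 is its successor, both children of 0.
    parents = {0: [], 1: [0], 3: [0]}
    assert hidden_revs({0, 1, 3}, parents, {1}, set()) == {1}   # no tag: hidden
    assert hidden_revs({0, 1, 3}, parents, {1}, {1}) == set()   # local tag pins it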
957 Test that removing a local tag does not cause some commands to fail
993 Test that removing a local tag does not cause some commands to fail
958
994
959 $ hg tag -l -r tip tiptag
995 $ hg tag -l -r tip tiptag
960 $ hg tags
996 $ hg tags
961 tiptag 3:323a9c3ddd91
997 tiptag 3:323a9c3ddd91
962 tip 3:323a9c3ddd91
998 tip 3:323a9c3ddd91
963 visible 1:29f0c6921ddd
999 visible 1:29f0c6921ddd
964 $ hg --config extensions.strip= strip -r tip --no-backup
1000 $ hg --config extensions.strip= strip -r tip --no-backup
965 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1001 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
966 $ hg tags
1002 $ hg tags
967 visible 1:29f0c6921ddd
1003 visible 1:29f0c6921ddd
968 tip 1:29f0c6921ddd
1004 tip 1:29f0c6921ddd
969
1005
970 Test bundle overlay onto hidden revision
1006 Test bundle overlay onto hidden revision
971
1007
972 $ cd ..
1008 $ cd ..
973 $ hg init repo-bundleoverlay
1009 $ hg init repo-bundleoverlay
974 $ cd repo-bundleoverlay
1010 $ cd repo-bundleoverlay
975 $ echo "A" > foo
1011 $ echo "A" > foo
976 $ hg ci -Am "A"
1012 $ hg ci -Am "A"
977 adding foo
1013 adding foo
978 $ echo "B" >> foo
1014 $ echo "B" >> foo
979 $ hg ci -m "B"
1015 $ hg ci -m "B"
980 $ hg up 0
1016 $ hg up 0
981 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1017 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
982 $ echo "C" >> foo
1018 $ echo "C" >> foo
983 $ hg ci -m "C"
1019 $ hg ci -m "C"
984 created new head
1020 created new head
985 $ hg log -G
1021 $ hg log -G
986 @ 2:c186d7714947 (draft) [tip ] C
1022 @ 2:c186d7714947 (draft) [tip ] C
987 |
1023 |
988 | o 1:44526ebb0f98 (draft) [ ] B
1024 | o 1:44526ebb0f98 (draft) [ ] B
989 |/
1025 |/
990 o 0:4b34ecfb0d56 (draft) [ ] A
1026 o 0:4b34ecfb0d56 (draft) [ ] A
991
1027
992
1028
993 $ hg clone -r1 . ../other-bundleoverlay
1029 $ hg clone -r1 . ../other-bundleoverlay
994 adding changesets
1030 adding changesets
995 adding manifests
1031 adding manifests
996 adding file changes
1032 adding file changes
997 added 2 changesets with 2 changes to 1 files
1033 added 2 changesets with 2 changes to 1 files
998 updating to branch default
1034 updating to branch default
999 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1035 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
1000 $ cd ../other-bundleoverlay
1036 $ cd ../other-bundleoverlay
1001 $ echo "B+" >> foo
1037 $ echo "B+" >> foo
1002 $ hg ci --amend -m "B+"
1038 $ hg ci --amend -m "B+"
1003 $ hg log -G --hidden
1039 $ hg log -G --hidden
1004 @ 3:b7d587542d40 (draft) [tip ] B+
1040 @ 3:b7d587542d40 (draft) [tip ] B+
1005 |
1041 |
1006 | x 2:eb95e9297e18 (draft *obsolete*) [ ] temporary amend commit for 44526ebb0f98
1042 | x 2:eb95e9297e18 (draft *obsolete*) [ ] temporary amend commit for 44526ebb0f98
1007 | |
1043 | |
1008 | x 1:44526ebb0f98 (draft *obsolete*) [ ] B
1044 | x 1:44526ebb0f98 (draft *obsolete*) [ ] B
1009 |/
1045 |/
1010 o 0:4b34ecfb0d56 (draft) [ ] A
1046 o 0:4b34ecfb0d56 (draft) [ ] A
1011
1047
1012
1048
1013 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
1049 $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg
1014 comparing with ../repo-bundleoverlay
1050 comparing with ../repo-bundleoverlay
1015 searching for changes
1051 searching for changes
1016 1:44526ebb0f98 (draft) [ ] B
1052 1:44526ebb0f98 (draft) [ ] B
1017 2:c186d7714947 (draft) [tip ] C
1053 2:c186d7714947 (draft) [tip ] C
1018 $ hg log -G -R ../bundleoverlay.hg
1054 $ hg log -G -R ../bundleoverlay.hg
1019 o 4:c186d7714947 (draft) [tip ] C
1055 o 4:c186d7714947 (draft) [tip ] C
1020 |
1056 |
1021 | @ 3:b7d587542d40 (draft) [ ] B+
1057 | @ 3:b7d587542d40 (draft) [ ] B+
1022 |/
1058 |/
1023 o 0:4b34ecfb0d56 (draft) [ ] A
1059 o 0:4b34ecfb0d56 (draft) [ ] A
1024
1060
1025
1061
1026 #if serve
1062 #if serve
1027
1063
1028 Test issue 4506
1064 Test issue 4506
1029
1065
1030 $ cd ..
1066 $ cd ..
1031 $ hg init repo-issue4506
1067 $ hg init repo-issue4506
1032 $ cd repo-issue4506
1068 $ cd repo-issue4506
1033 $ echo "0" > foo
1069 $ echo "0" > foo
1034 $ hg add foo
1070 $ hg add foo
1035 $ hg ci -m "content-0"
1071 $ hg ci -m "content-0"
1036
1072
1037 $ hg up null
1073 $ hg up null
1038 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1074 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1039 $ echo "1" > bar
1075 $ echo "1" > bar
1040 $ hg add bar
1076 $ hg add bar
1041 $ hg ci -m "content-1"
1077 $ hg ci -m "content-1"
1042 created new head
1078 created new head
1043 $ hg up 0
1079 $ hg up 0
1044 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1080 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
1045 $ hg graft 1
1081 $ hg graft 1
1046 grafting 1:1c9eddb02162 "content-1" (tip)
1082 grafting 1:1c9eddb02162 "content-1" (tip)
1047
1083
1048 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
1084 $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'`
1049
1085
1050 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1086 $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
1051 $ cat hg.pid >> $DAEMON_PIDS
1087 $ cat hg.pid >> $DAEMON_PIDS
1052
1088
1053 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
1089 $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1'
1054 404 Not Found
1090 404 Not Found
1055 [1]
1091 [1]
1056 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
1092 $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar'
1057 200 Script output follows
1093 200 Script output follows
1058 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
1094 $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar'
1059 200 Script output follows
1095 200 Script output follows
1060
1096
1061 $ killdaemons.py
1097 $ killdaemons.py
1062
1098
1063 #endif
1099 #endif
1064
1100
1065 Test heads computation on pending index changes with obsolescence markers
1101 Test heads computation on pending index changes with obsolescence markers
1066 $ cd ..
1102 $ cd ..
1067 $ cat >$TESTTMP/test_extension.py << EOF
1103 $ cat >$TESTTMP/test_extension.py << EOF
1068 > from mercurial import cmdutil, registrar
1104 > from mercurial import cmdutil, registrar
1069 > from mercurial.i18n import _
1105 > from mercurial.i18n import _
1070 >
1106 >
1071 > cmdtable = {}
1107 > cmdtable = {}
1072 > command = registrar.command(cmdtable)
1108 > command = registrar.command(cmdtable)
1073 > @command("amendtransient",[], _('hg amendtransient [rev]'))
1109 > @command("amendtransient",[], _('hg amendtransient [rev]'))
1074 > def amend(ui, repo, *pats, **opts):
1110 > def amend(ui, repo, *pats, **opts):
1075 > def commitfunc(ui, repo, message, match, opts):
1111 > def commitfunc(ui, repo, message, match, opts):
1076 > return repo.commit(message, repo['.'].user(), repo['.'].date(), match)
1112 > return repo.commit(message, repo['.'].user(), repo['.'].date(), match)
1077 > opts['message'] = 'Test'
1113 > opts['message'] = 'Test'
1078 > opts['logfile'] = None
1114 > opts['logfile'] = None
1079 > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts)
1115 > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts)
1080 > ui.write('%s\n' % repo.changelog.headrevs())
1116 > ui.write('%s\n' % repo.changelog.headrevs())
1081 > EOF
1117 > EOF
1082 $ cat >> $HGRCPATH << EOF
1118 $ cat >> $HGRCPATH << EOF
1083 > [extensions]
1119 > [extensions]
1084 > testextension=$TESTTMP/test_extension.py
1120 > testextension=$TESTTMP/test_extension.py
1085 > EOF
1121 > EOF
1086 $ hg init repo-issue-nativerevs-pending-changes
1122 $ hg init repo-issue-nativerevs-pending-changes
1087 $ cd repo-issue-nativerevs-pending-changes
1123 $ cd repo-issue-nativerevs-pending-changes
1088 $ mkcommit a
1124 $ mkcommit a
1089 $ mkcommit b
1125 $ mkcommit b
1090 $ hg up ".^"
1126 $ hg up ".^"
1091 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1127 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
1092 $ echo aa > a
1128 $ echo aa > a
1093 $ hg amendtransient
1129 $ hg amendtransient
1094 [1, 3]
1130 [1, 3]
1095
1131
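The `[1, 3]` printed by the extension is `repo.changelog.headrevs()` evaluated while the amend is still pending in the transaction, i.e. head computation already sees the not-yet-committed index entries. Heads themselves are simply the revisions that no other revision lists as a parent; a stand-alone illustration (toy numbering, not this repository's exact DAG):

    def head_revs(parents):
        """Revisions that no other revision lists as a parent."""
        revs = set(parents)
        non_heads = {p for ps in parents.values() for p in ps if p != -1}
        return sorted(revs - non_heads)

    # Two leaves on top of a small history; -1 stands for the null parent.
    assert head_revs({0: [-1], 1: [0], 2: [0], 3: [2]}) == [1, 3]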
1096 Test cache consistency for the visible filter
1132 Test cache consistency for the visible filter
1097 1) We want to make sure that the cached filtered revs are invalidated when
1133 1) We want to make sure that the cached filtered revs are invalidated when
1098 bookmarks change
1134 bookmarks change
1099 $ cd ..
1135 $ cd ..
1100 $ cat >$TESTTMP/test_extension.py << EOF
1136 $ cat >$TESTTMP/test_extension.py << EOF
1101 > import weakref
1137 > import weakref
1102 > from mercurial import cmdutil, extensions, bookmarks, repoview
1138 > from mercurial import cmdutil, extensions, bookmarks, repoview
1103 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
1139 > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs):
1104 > reporef = weakref.ref(bkmstoreinst._repo)
1140 > reporef = weakref.ref(bkmstoreinst._repo)
1105 > def trhook(tr):
1141 > def trhook(tr):
1106 > repo = reporef()
1142 > repo = reporef()
1107 > hidden1 = repoview.computehidden(repo)
1143 > hidden1 = repoview.computehidden(repo)
1108 > hidden = repoview.filterrevs(repo, 'visible')
1144 > hidden = repoview.filterrevs(repo, 'visible')
1109 > if sorted(hidden1) != sorted(hidden):
1145 > if sorted(hidden1) != sorted(hidden):
1110 > print "cache inconsistency"
1146 > print "cache inconsistency"
1111 > bkmstoreinst._repo.currenttransaction().addpostclose('test_extension', trhook)
1147 > bkmstoreinst._repo.currenttransaction().addpostclose('test_extension', trhook)
1112 > orig(bkmstoreinst, *args, **kwargs)
1148 > orig(bkmstoreinst, *args, **kwargs)
1113 > def extsetup(ui):
1149 > def extsetup(ui):
1114 > extensions.wrapfunction(bookmarks.bmstore, 'recordchange',
1150 > extensions.wrapfunction(bookmarks.bmstore, 'recordchange',
1115 > _bookmarkchanged)
1151 > _bookmarkchanged)
1116 > EOF
1152 > EOF
1117
1153
1118 $ hg init repo-cache-inconsistency
1154 $ hg init repo-cache-inconsistency
1119 $ cd repo-issue-nativerevs-pending-changes
1155 $ cd repo-issue-nativerevs-pending-changes
1120 $ mkcommit a
1156 $ mkcommit a
1121 a already tracked!
1157 a already tracked!
1122 $ mkcommit b
1158 $ mkcommit b
1123 $ hg id
1159 $ hg id
1124 13bedc178fce tip
1160 13bedc178fce tip
1125 $ echo "hello" > b
1161 $ echo "hello" > b
1126 $ hg commit --amend -m "message"
1162 $ hg commit --amend -m "message"
1127 $ hg book bookb -r 13bedc178fce --hidden
1163 $ hg book bookb -r 13bedc178fce --hidden
1128 $ hg log -r 13bedc178fce
1164 $ hg log -r 13bedc178fce
1129 5:13bedc178fce (draft *obsolete*) [ bookb] add b
1165 5:13bedc178fce (draft *obsolete*) [ bookb] add b
1130 $ hg book -d bookb
1166 $ hg book -d bookb
1131 $ hg log -r 13bedc178fce
1167 $ hg log -r 13bedc178fce
1132 abort: hidden revision '13bedc178fce'!
1168 abort: hidden revision '13bedc178fce'!
1133 (use --hidden to access hidden revisions)
1169 (use --hidden to access hidden revisions)
1134 [255]
1170 [255]
1135
1171
1136 Empty out the test extension, as it isn't compatible with later parts
1172 Empty out the test extension, as it isn't compatible with later parts
1137 of the test.
1173 of the test.
1138 $ echo > $TESTTMP/test_extension.py
1174 $ echo > $TESTTMP/test_extension.py
1139
1175
1140 Test the ability to pull a changeset whose obsolescence markers already apply locally
1176 Test the ability to pull a changeset whose obsolescence markers already apply locally
1141 (issue4945)
1177 (issue4945)
1142
1178
1143 $ cd ..
1179 $ cd ..
1144 $ hg init issue4845
1180 $ hg init issue4845
1145 $ cd issue4845
1181 $ cd issue4845
1146
1182
1147 $ echo foo > f0
1183 $ echo foo > f0
1148 $ hg add f0
1184 $ hg add f0
1149 $ hg ci -m '0'
1185 $ hg ci -m '0'
1150 $ echo foo > f1
1186 $ echo foo > f1
1151 $ hg add f1
1187 $ hg add f1
1152 $ hg ci -m '1'
1188 $ hg ci -m '1'
1153 $ echo foo > f2
1189 $ echo foo > f2
1154 $ hg add f2
1190 $ hg add f2
1155 $ hg ci -m '2'
1191 $ hg ci -m '2'
1156
1192
1157 $ echo bar > f2
1193 $ echo bar > f2
1158 $ hg commit --amend --config experimental.evolution=createmarkers
1194 $ hg commit --amend --config experimental.evolution=createmarkers
1159 $ hg log -G
1195 $ hg log -G
1160 @ 4:b0551702f918 (draft) [tip ] 2
1196 @ 4:b0551702f918 (draft) [tip ] 2
1161 |
1197 |
1162 o 1:e016b03fd86f (draft) [ ] 1
1198 o 1:e016b03fd86f (draft) [ ] 1
1163 |
1199 |
1164 o 0:a78f55e5508c (draft) [ ] 0
1200 o 0:a78f55e5508c (draft) [ ] 0
1165
1201
1166 $ hg log -G --hidden
1202 $ hg log -G --hidden
1167 @ 4:b0551702f918 (draft) [tip ] 2
1203 @ 4:b0551702f918 (draft) [tip ] 2
1168 |
1204 |
1169 | x 3:f27abbcc1f77 (draft *obsolete*) [ ] temporary amend commit for e008cf283490
1205 | x 3:f27abbcc1f77 (draft *obsolete*) [ ] temporary amend commit for e008cf283490
1170 | |
1206 | |
1171 | x 2:e008cf283490 (draft *obsolete*) [ ] 2
1207 | x 2:e008cf283490 (draft *obsolete*) [ ] 2
1172 |/
1208 |/
1173 o 1:e016b03fd86f (draft) [ ] 1
1209 o 1:e016b03fd86f (draft) [ ] 1
1174 |
1210 |
1175 o 0:a78f55e5508c (draft) [ ] 0
1211 o 0:a78f55e5508c (draft) [ ] 0
1176
1212
1177
1213
$ hg strip -r 1 --config extensions.strip=
0 files updated, 0 files merged, 2 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg (glob)
$ hg log -G
@ 0:a78f55e5508c (draft) [tip ] 0

$ hg log -G --hidden
@ 0:a78f55e5508c (draft) [tip ] 0

$ hg debugbundle .hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg
Stream params: sortdict([('Compression', 'BZ')])
changegroup -- "sortdict([('version', '02'), ('nbchanges', '4')])"
e016b03fd86fcccc54817d120b90b751aaf367d6
e008cf2834908e5d6b0f792a9d4b0e2272260fb8
f27abbcc1f77fb409cf9160482fe619541e2d605
b0551702f918510f01ae838ab03a463054c67b46
obsmarkers -- 'sortdict()'
version: 1 (139 bytes)
e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
f27abbcc1f77fb409cf9160482fe619541e2d605 0 {e008cf2834908e5d6b0f792a9d4b0e2272260fb8} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

$ hg pull .hg/strip-backup/*
pulling from .hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg
searching for changes
adding changesets
adding manifests
adding file changes
added 2 changesets with 2 changes to 2 files
(run 'hg update' to get a working copy)
$ hg log -G
o 2:b0551702f918 (draft) [tip ] 2
|
o 1:e016b03fd86f (draft) [ ] 1
|
@ 0:a78f55e5508c (draft) [ ] 0

$ hg log -G --hidden
o 2:b0551702f918 (draft) [tip ] 2
|
o 1:e016b03fd86f (draft) [ ] 1
|
@ 0:a78f55e5508c (draft) [ ] 0

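The backup bundle written by strip carries the relevant obsolescence markers
next to the changegroup (see the debugbundle output above), so pulling it back
should not lose any obsolescence data. As an illustrative aside (not part of
the recorded test), one way to check that across such a strip/pull round trip
is to snapshot the output of 'hg debugobsolete' before and after; the sketch
assumes Python 3 and hg on PATH.

  # Hypothetical round-trip check, not part of the test suite.
  import subprocess

  def markers():
      out = subprocess.run(['hg', 'debugobsolete'],
                           capture_output=True, text=True, check=True).stdout
      return {line for line in out.splitlines() if line.strip()}

  before = markers()
  # ... run the strip and the pull from .hg/strip-backup/* as shown above ...
  after = markers()
  assert before <= after, 'obsolescence markers were lost by the round trip'
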
Test that 'hg debugobsolete --index --rev' shows the indices of obsmarkers even
when only a subset of them is displayed (because of the --rev option)
$ hg init doindexrev
$ cd doindexrev
$ echo a > a
$ hg ci -Am a
adding a
$ hg ci --amend -m aa
$ echo b > b
$ hg ci -Am b
adding b
$ hg ci --amend -m bb
$ echo c > c
$ hg ci -Am c
adding c
$ hg ci --amend -m cc
$ echo d > d
$ hg ci -Am d
adding d
$ hg ci --amend -m dd --config experimental.evolution.track-operation=1
$ hg debugobsolete --index --rev "3+7"
1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 \(.*\) {'user': 'test'} (re)
3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 \(.*\) {'operation': 'amend', 'user': 'test'} (re)
$ hg debugobsolete --index --rev "3+7" -Tjson
[
 {
  "date": [0.0, 0],
  "flag": 0,
  "index": 1,
  "metadata": {"user": "test"},
  "precnode": "6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1",
  "succnodes": ["d27fb9b066076fd921277a4b9e8b9cb48c95bc6a"]
 },
 {
  "date": [0.0, 0],
  "flag": 0,
  "index": 3,
  "metadata": {"operation": "amend", "user": "test"},
  "precnode": "4715cf767440ed891755448016c2b8cf70760c30",
  "succnodes": ["7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d"]
 }
]

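The JSON form above is convenient for scripting: each marker is one object whose
"index" matches the position shown by --index. As an illustrative aside (not
part of the recorded test), a consumer could map indices to precursor and
successor nodes as below; the sketch assumes Python 3 and hg on PATH.

  # Hypothetical consumer of 'hg debugobsolete ... -Tjson', not part of the test suite.
  import json
  import subprocess

  out = subprocess.run(
      ['hg', 'debugobsolete', '--index', '--rev', '3+7', '-Tjson'],
      capture_output=True, text=True, check=True).stdout
  for marker in json.loads(out):
      print(marker['index'], marker['precnode'], '->', ','.join(marker['succnodes']))
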
Test the --delete option of the debugobsolete command
$ hg debugobsolete --index
0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1 6fdef60fcbabbd3d50e9b9cbc2a240724b91a5e1 d27fb9b066076fd921277a4b9e8b9cb48c95bc6a 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
2 1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
3 4715cf767440ed891755448016c2b8cf70760c30 7ae79c5d60f049c7b0dd02f5f25b9d60aaf7b36d 0 (Thu Jan 01 00:00:00 1970 +0000) {'operation': 'amend', 'user': 'test'}
$ hg debugobsolete --delete 1 --delete 3
deleted 2 obsolescence markers
$ hg debugobsolete
cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b f9bd49731b0b175e42992a3c8fa6c678b2bc11f1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
1ab51af8f9b41ef8c7f6f3312d4706d870b1fb74 29346082e4a9e27042b62d2da0e2de211c027621 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}

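The indices printed by --index are exactly what --delete expects. As an
illustrative aside (not part of the recorded test), the deletion step above
could be driven from a script such as the following; the sketch assumes
Python 3 and hg on PATH, and the indices refer to the marker list as it stands
at the time of the call.

  # Hypothetical wrapper around 'hg debugobsolete --delete', not part of the test suite.
  import subprocess

  def delete_markers(indices):
      cmd = ['hg', 'debugobsolete']
      for idx in indices:
          cmd += ['--delete', str(idx)]
      subprocess.run(cmd, check=True)

  delete_markers([1, 3])  # mirrors 'hg debugobsolete --delete 1 --delete 3' above
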
Test adding a changeset after obsmarkers affecting it already exist locally
(e.g. during pull or unbundle)

$ mkcommit e
$ hg bundle -r . --base .~1 ../bundle-2.hg
1 changesets found
$ getid .
$ hg --config extensions.strip= strip -r .
0 files updated, 0 files merged, 1 files removed, 0 files unresolved
saved backup bundle to $TESTTMP/tmpe/issue4845/doindexrev/.hg/strip-backup/9bc153528424-ee80edd4-backup.hg (glob)
$ hg debugobsolete 9bc153528424ea266d13e57f9ff0d799dfe61e4b
$ hg unbundle ../bundle-2.hg
adding changesets
adding manifests
adding file changes
added 1 changesets with 1 changes to 1 files
(run 'hg update' to get a working copy)
$ hg log -G
@ 7:7ae79c5d60f0 (draft) [tip ] dd
|
| o 6:4715cf767440 (draft) [ ] d
|/
o 5:29346082e4a9 (draft) [ ] cc
|
o 3:d27fb9b06607 (draft) [ ] bb
|
| o 2:6fdef60fcbab (draft) [ ] b
|/
o 1:f9bd49731b0b (draft) [ ] aa


$ cd ..