debugbundle: display the content of obsmarkers parts...
Author: marmoute
Changeset: r32517:b62b2b37 (branch: default)
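For context on what this changeset adds: the new _debugobsmarkers helper decodes the raw bytes of a bundle2 "obsmarkers" part with obsolete._readmarkers() and prints each marker through the debugobsolete formatter. Below is a minimal standalone sketch (not part of the changeset) of that same decoding step. It assumes a Mercurial source tree of this era (4.2.x) is importable, that `data` holds the bytes returned by part.read() for an obsmarkers part, and that the precnode()/succnodes() accessors match the obsolete.marker wrapper of that era.

# Minimal sketch: decode an "obsmarkers" bundle2 part the way the new
# _debugobsmarkers helper does (assumptions noted above).
from mercurial import obsolete
from mercurial.node import short

def summarize_obsmarkers(data):
    # _readmarkers() returns the on-disk format version plus an iterable of
    # raw marker tuples; this is the exact call used by the new helper.
    version, markers = obsolete._readmarkers(data)
    print('version: %s (%d bytes)' % (version, len(data)))
    for rawmarker in sorted(markers):
        # obsolete.marker wraps a raw tuple, as in the new helper.
        m = obsolete.marker(None, rawmarker)
        # A marker records that its precursor node was rewritten into zero
        # or more successor nodes.
        succs = ', '.join(short(s) for s in m.succnodes())
        print('  %s -> %s' % (short(m.precnode()), succs or '(pruned)'))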
@@ -1,2162 +1,2185 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 context,
35 context,
36 dagparser,
36 dagparser,
37 dagutil,
37 dagutil,
38 encoding,
38 encoding,
39 error,
39 error,
40 exchange,
40 exchange,
41 extensions,
41 extensions,
42 filemerge,
42 filemerge,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 policy,
50 policy,
51 pvec,
51 pvec,
52 pycompat,
52 pycompat,
53 registrar,
53 registrar,
54 repair,
54 repair,
55 revlog,
55 revlog,
56 revset,
56 revset,
57 revsetlang,
57 revsetlang,
58 scmutil,
58 scmutil,
59 setdiscovery,
59 setdiscovery,
60 simplemerge,
60 simplemerge,
61 smartset,
61 smartset,
62 sslutil,
62 sslutil,
63 streamclone,
63 streamclone,
64 templater,
64 templater,
65 treediscovery,
65 treediscovery,
66 upgrade,
66 upgrade,
67 util,
67 util,
68 vfs as vfsmod,
68 vfs as vfsmod,
69 )
69 )
70
70
71 release = lockmod.release
71 release = lockmod.release
72
72
73 command = registrar.command()
73 command = registrar.command()
74
74
75 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
75 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
76 def debugancestor(ui, repo, *args):
76 def debugancestor(ui, repo, *args):
77 """find the ancestor revision of two revisions in a given index"""
77 """find the ancestor revision of two revisions in a given index"""
78 if len(args) == 3:
78 if len(args) == 3:
79 index, rev1, rev2 = args
79 index, rev1, rev2 = args
80 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
80 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
81 lookup = r.lookup
81 lookup = r.lookup
82 elif len(args) == 2:
82 elif len(args) == 2:
83 if not repo:
83 if not repo:
84 raise error.Abort(_('there is no Mercurial repository here '
84 raise error.Abort(_('there is no Mercurial repository here '
85 '(.hg not found)'))
85 '(.hg not found)'))
86 rev1, rev2 = args
86 rev1, rev2 = args
87 r = repo.changelog
87 r = repo.changelog
88 lookup = repo.lookup
88 lookup = repo.lookup
89 else:
89 else:
90 raise error.Abort(_('either two or three arguments required'))
90 raise error.Abort(_('either two or three arguments required'))
91 a = r.ancestor(lookup(rev1), lookup(rev2))
91 a = r.ancestor(lookup(rev1), lookup(rev2))
92 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
92 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
93
93
94 @command('debugapplystreamclonebundle', [], 'FILE')
94 @command('debugapplystreamclonebundle', [], 'FILE')
95 def debugapplystreamclonebundle(ui, repo, fname):
95 def debugapplystreamclonebundle(ui, repo, fname):
96 """apply a stream clone bundle file"""
96 """apply a stream clone bundle file"""
97 f = hg.openpath(ui, fname)
97 f = hg.openpath(ui, fname)
98 gen = exchange.readbundle(ui, f, fname)
98 gen = exchange.readbundle(ui, f, fname)
99 gen.apply(repo)
99 gen.apply(repo)
100
100
101 @command('debugbuilddag',
101 @command('debugbuilddag',
102 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
102 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
103 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
103 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
104 ('n', 'new-file', None, _('add new file at each rev'))],
104 ('n', 'new-file', None, _('add new file at each rev'))],
105 _('[OPTION]... [TEXT]'))
105 _('[OPTION]... [TEXT]'))
106 def debugbuilddag(ui, repo, text=None,
106 def debugbuilddag(ui, repo, text=None,
107 mergeable_file=False,
107 mergeable_file=False,
108 overwritten_file=False,
108 overwritten_file=False,
109 new_file=False):
109 new_file=False):
110 """builds a repo with a given DAG from scratch in the current empty repo
110 """builds a repo with a given DAG from scratch in the current empty repo
111
111
112 The description of the DAG is read from stdin if not given on the
112 The description of the DAG is read from stdin if not given on the
113 command line.
113 command line.
114
114
115 Elements:
115 Elements:
116
116
117 - "+n" is a linear run of n nodes based on the current default parent
117 - "+n" is a linear run of n nodes based on the current default parent
118 - "." is a single node based on the current default parent
118 - "." is a single node based on the current default parent
119 - "$" resets the default parent to null (implied at the start);
119 - "$" resets the default parent to null (implied at the start);
120 otherwise the default parent is always the last node created
120 otherwise the default parent is always the last node created
121 - "<p" sets the default parent to the backref p
121 - "<p" sets the default parent to the backref p
122 - "*p" is a fork at parent p, which is a backref
122 - "*p" is a fork at parent p, which is a backref
123 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
123 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
124 - "/p2" is a merge of the preceding node and p2
124 - "/p2" is a merge of the preceding node and p2
125 - ":tag" defines a local tag for the preceding node
125 - ":tag" defines a local tag for the preceding node
126 - "@branch" sets the named branch for subsequent nodes
126 - "@branch" sets the named branch for subsequent nodes
127 - "#...\\n" is a comment up to the end of the line
127 - "#...\\n" is a comment up to the end of the line
128
128
129 Whitespace between the above elements is ignored.
129 Whitespace between the above elements is ignored.
130
130
131 A backref is either
131 A backref is either
132
132
133 - a number n, which references the node curr-n, where curr is the current
133 - a number n, which references the node curr-n, where curr is the current
134 node, or
134 node, or
135 - the name of a local tag you placed earlier using ":tag", or
135 - the name of a local tag you placed earlier using ":tag", or
136 - empty to denote the default parent.
136 - empty to denote the default parent.
137
137
138 All string valued-elements are either strictly alphanumeric, or must
138 All string valued-elements are either strictly alphanumeric, or must
139 be enclosed in double quotes ("..."), with "\\" as escape character.
139 be enclosed in double quotes ("..."), with "\\" as escape character.
140 """
140 """
141
141
142 if text is None:
142 if text is None:
143 ui.status(_("reading DAG from stdin\n"))
143 ui.status(_("reading DAG from stdin\n"))
144 text = ui.fin.read()
144 text = ui.fin.read()
145
145
146 cl = repo.changelog
146 cl = repo.changelog
147 if len(cl) > 0:
147 if len(cl) > 0:
148 raise error.Abort(_('repository is not empty'))
148 raise error.Abort(_('repository is not empty'))
149
149
150 # determine number of revs in DAG
150 # determine number of revs in DAG
151 total = 0
151 total = 0
152 for type, data in dagparser.parsedag(text):
152 for type, data in dagparser.parsedag(text):
153 if type == 'n':
153 if type == 'n':
154 total += 1
154 total += 1
155
155
156 if mergeable_file:
156 if mergeable_file:
157 linesperrev = 2
157 linesperrev = 2
158 # make a file with k lines per rev
158 # make a file with k lines per rev
159 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
159 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
160 initialmergedlines.append("")
160 initialmergedlines.append("")
161
161
162 tags = []
162 tags = []
163
163
164 wlock = lock = tr = None
164 wlock = lock = tr = None
165 try:
165 try:
166 wlock = repo.wlock()
166 wlock = repo.wlock()
167 lock = repo.lock()
167 lock = repo.lock()
168 tr = repo.transaction("builddag")
168 tr = repo.transaction("builddag")
169
169
170 at = -1
170 at = -1
171 atbranch = 'default'
171 atbranch = 'default'
172 nodeids = []
172 nodeids = []
173 id = 0
173 id = 0
174 ui.progress(_('building'), id, unit=_('revisions'), total=total)
174 ui.progress(_('building'), id, unit=_('revisions'), total=total)
175 for type, data in dagparser.parsedag(text):
175 for type, data in dagparser.parsedag(text):
176 if type == 'n':
176 if type == 'n':
177 ui.note(('node %s\n' % str(data)))
177 ui.note(('node %s\n' % str(data)))
178 id, ps = data
178 id, ps = data
179
179
180 files = []
180 files = []
181 fctxs = {}
181 fctxs = {}
182
182
183 p2 = None
183 p2 = None
184 if mergeable_file:
184 if mergeable_file:
185 fn = "mf"
185 fn = "mf"
186 p1 = repo[ps[0]]
186 p1 = repo[ps[0]]
187 if len(ps) > 1:
187 if len(ps) > 1:
188 p2 = repo[ps[1]]
188 p2 = repo[ps[1]]
189 pa = p1.ancestor(p2)
189 pa = p1.ancestor(p2)
190 base, local, other = [x[fn].data() for x in (pa, p1,
190 base, local, other = [x[fn].data() for x in (pa, p1,
191 p2)]
191 p2)]
192 m3 = simplemerge.Merge3Text(base, local, other)
192 m3 = simplemerge.Merge3Text(base, local, other)
193 ml = [l.strip() for l in m3.merge_lines()]
193 ml = [l.strip() for l in m3.merge_lines()]
194 ml.append("")
194 ml.append("")
195 elif at > 0:
195 elif at > 0:
196 ml = p1[fn].data().split("\n")
196 ml = p1[fn].data().split("\n")
197 else:
197 else:
198 ml = initialmergedlines
198 ml = initialmergedlines
199 ml[id * linesperrev] += " r%i" % id
199 ml[id * linesperrev] += " r%i" % id
200 mergedtext = "\n".join(ml)
200 mergedtext = "\n".join(ml)
201 files.append(fn)
201 files.append(fn)
202 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
202 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
203
203
204 if overwritten_file:
204 if overwritten_file:
205 fn = "of"
205 fn = "of"
206 files.append(fn)
206 files.append(fn)
207 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
207 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
208
208
209 if new_file:
209 if new_file:
210 fn = "nf%i" % id
210 fn = "nf%i" % id
211 files.append(fn)
211 files.append(fn)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 if len(ps) > 1:
213 if len(ps) > 1:
214 if not p2:
214 if not p2:
215 p2 = repo[ps[1]]
215 p2 = repo[ps[1]]
216 for fn in p2:
216 for fn in p2:
217 if fn.startswith("nf"):
217 if fn.startswith("nf"):
218 files.append(fn)
218 files.append(fn)
219 fctxs[fn] = p2[fn]
219 fctxs[fn] = p2[fn]
220
220
221 def fctxfn(repo, cx, path):
221 def fctxfn(repo, cx, path):
222 return fctxs.get(path)
222 return fctxs.get(path)
223
223
224 if len(ps) == 0 or ps[0] < 0:
224 if len(ps) == 0 or ps[0] < 0:
225 pars = [None, None]
225 pars = [None, None]
226 elif len(ps) == 1:
226 elif len(ps) == 1:
227 pars = [nodeids[ps[0]], None]
227 pars = [nodeids[ps[0]], None]
228 else:
228 else:
229 pars = [nodeids[p] for p in ps]
229 pars = [nodeids[p] for p in ps]
230 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
230 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
231 date=(id, 0),
231 date=(id, 0),
232 user="debugbuilddag",
232 user="debugbuilddag",
233 extra={'branch': atbranch})
233 extra={'branch': atbranch})
234 nodeid = repo.commitctx(cx)
234 nodeid = repo.commitctx(cx)
235 nodeids.append(nodeid)
235 nodeids.append(nodeid)
236 at = id
236 at = id
237 elif type == 'l':
237 elif type == 'l':
238 id, name = data
238 id, name = data
239 ui.note(('tag %s\n' % name))
239 ui.note(('tag %s\n' % name))
240 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
240 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
241 elif type == 'a':
241 elif type == 'a':
242 ui.note(('branch %s\n' % data))
242 ui.note(('branch %s\n' % data))
243 atbranch = data
243 atbranch = data
244 ui.progress(_('building'), id, unit=_('revisions'), total=total)
244 ui.progress(_('building'), id, unit=_('revisions'), total=total)
245 tr.close()
245 tr.close()
246
246
247 if tags:
247 if tags:
248 repo.vfs.write("localtags", "".join(tags))
248 repo.vfs.write("localtags", "".join(tags))
249 finally:
249 finally:
250 ui.progress(_('building'), None)
250 ui.progress(_('building'), None)
251 release(tr, lock, wlock)
251 release(tr, lock, wlock)
252
252
253 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
253 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
254 indent_string = ' ' * indent
254 indent_string = ' ' * indent
255 if all:
255 if all:
256 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
256 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
257 % indent_string)
257 % indent_string)
258
258
259 def showchunks(named):
259 def showchunks(named):
260 ui.write("\n%s%s\n" % (indent_string, named))
260 ui.write("\n%s%s\n" % (indent_string, named))
261 chain = None
261 chain = None
262 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
262 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
263 node = chunkdata['node']
263 node = chunkdata['node']
264 p1 = chunkdata['p1']
264 p1 = chunkdata['p1']
265 p2 = chunkdata['p2']
265 p2 = chunkdata['p2']
266 cs = chunkdata['cs']
266 cs = chunkdata['cs']
267 deltabase = chunkdata['deltabase']
267 deltabase = chunkdata['deltabase']
268 delta = chunkdata['delta']
268 delta = chunkdata['delta']
269 ui.write("%s%s %s %s %s %s %s\n" %
269 ui.write("%s%s %s %s %s %s %s\n" %
270 (indent_string, hex(node), hex(p1), hex(p2),
270 (indent_string, hex(node), hex(p1), hex(p2),
271 hex(cs), hex(deltabase), len(delta)))
271 hex(cs), hex(deltabase), len(delta)))
272 chain = node
272 chain = node
273
273
274 chunkdata = gen.changelogheader()
274 chunkdata = gen.changelogheader()
275 showchunks("changelog")
275 showchunks("changelog")
276 chunkdata = gen.manifestheader()
276 chunkdata = gen.manifestheader()
277 showchunks("manifest")
277 showchunks("manifest")
278 for chunkdata in iter(gen.filelogheader, {}):
278 for chunkdata in iter(gen.filelogheader, {}):
279 fname = chunkdata['filename']
279 fname = chunkdata['filename']
280 showchunks(fname)
280 showchunks(fname)
281 else:
281 else:
282 if isinstance(gen, bundle2.unbundle20):
282 if isinstance(gen, bundle2.unbundle20):
283 raise error.Abort(_('use debugbundle2 for this file'))
283 raise error.Abort(_('use debugbundle2 for this file'))
284 chunkdata = gen.changelogheader()
284 chunkdata = gen.changelogheader()
285 chain = None
285 chain = None
286 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
286 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
287 node = chunkdata['node']
287 node = chunkdata['node']
288 ui.write("%s%s\n" % (indent_string, hex(node)))
288 ui.write("%s%s\n" % (indent_string, hex(node)))
289 chain = node
289 chain = node
290
290
291 def _debugobsmarkers(ui, data, all=None, indent=0, **opts):
292 """display version and markers contained in 'data'"""
293 indent_string = ' ' * indent
294 try:
295 version, markers = obsolete._readmarkers(data)
296 except error.UnknownVersion as exc:
297 msg = "%sunsupported version: %s (%d bytes)\n"
298 msg %= indent_string, exc.version, len(data)
299 ui.write(msg)
300 else:
301 msg = "%sversion: %s (%d bytes)\n"
302 msg %= indent_string, version, len(data)
303 ui.write(msg)
304 fm = ui.formatter('debugobsolete', opts)
305 for rawmarker in sorted(markers):
306 m = obsolete.marker(None, rawmarker)
307 fm.startitem()
308 fm.plain(indent_string)
309 cmdutil.showmarker(fm, m)
310 fm.end()
311
291 def _debugbundle2(ui, gen, all=None, **opts):
312 def _debugbundle2(ui, gen, all=None, **opts):
292 """lists the contents of a bundle2"""
313 """lists the contents of a bundle2"""
293 if not isinstance(gen, bundle2.unbundle20):
314 if not isinstance(gen, bundle2.unbundle20):
294 raise error.Abort(_('not a bundle2 file'))
315 raise error.Abort(_('not a bundle2 file'))
295 ui.write(('Stream params: %s\n' % repr(gen.params)))
316 ui.write(('Stream params: %s\n' % repr(gen.params)))
296 for part in gen.iterparts():
317 for part in gen.iterparts():
297 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
318 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
298 if part.type == 'changegroup':
319 if part.type == 'changegroup':
299 version = part.params.get('version', '01')
320 version = part.params.get('version', '01')
300 cg = changegroup.getunbundler(version, part, 'UN')
321 cg = changegroup.getunbundler(version, part, 'UN')
301 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
322 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
323 if part.type == 'obsmarkers':
324 _debugobsmarkers(ui, part.read(), all=all, indent=4, **opts)
302
325
303 @command('debugbundle',
326 @command('debugbundle',
304 [('a', 'all', None, _('show all details')),
327 [('a', 'all', None, _('show all details')),
305 ('', 'spec', None, _('print the bundlespec of the bundle'))],
328 ('', 'spec', None, _('print the bundlespec of the bundle'))],
306 _('FILE'),
329 _('FILE'),
307 norepo=True)
330 norepo=True)
308 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
331 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
309 """lists the contents of a bundle"""
332 """lists the contents of a bundle"""
310 with hg.openpath(ui, bundlepath) as f:
333 with hg.openpath(ui, bundlepath) as f:
311 if spec:
334 if spec:
312 spec = exchange.getbundlespec(ui, f)
335 spec = exchange.getbundlespec(ui, f)
313 ui.write('%s\n' % spec)
336 ui.write('%s\n' % spec)
314 return
337 return
315
338
316 gen = exchange.readbundle(ui, f, bundlepath)
339 gen = exchange.readbundle(ui, f, bundlepath)
317 if isinstance(gen, bundle2.unbundle20):
340 if isinstance(gen, bundle2.unbundle20):
318 return _debugbundle2(ui, gen, all=all, **opts)
341 return _debugbundle2(ui, gen, all=all, **opts)
319 _debugchangegroup(ui, gen, all=all, **opts)
342 _debugchangegroup(ui, gen, all=all, **opts)
320
343
321 @command('debugcheckstate', [], '')
344 @command('debugcheckstate', [], '')
322 def debugcheckstate(ui, repo):
345 def debugcheckstate(ui, repo):
323 """validate the correctness of the current dirstate"""
346 """validate the correctness of the current dirstate"""
324 parent1, parent2 = repo.dirstate.parents()
347 parent1, parent2 = repo.dirstate.parents()
325 m1 = repo[parent1].manifest()
348 m1 = repo[parent1].manifest()
326 m2 = repo[parent2].manifest()
349 m2 = repo[parent2].manifest()
327 errors = 0
350 errors = 0
328 for f in repo.dirstate:
351 for f in repo.dirstate:
329 state = repo.dirstate[f]
352 state = repo.dirstate[f]
330 if state in "nr" and f not in m1:
353 if state in "nr" and f not in m1:
331 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
354 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
332 errors += 1
355 errors += 1
333 if state in "a" and f in m1:
356 if state in "a" and f in m1:
334 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
357 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
335 errors += 1
358 errors += 1
336 if state in "m" and f not in m1 and f not in m2:
359 if state in "m" and f not in m1 and f not in m2:
337 ui.warn(_("%s in state %s, but not in either manifest\n") %
360 ui.warn(_("%s in state %s, but not in either manifest\n") %
338 (f, state))
361 (f, state))
339 errors += 1
362 errors += 1
340 for f in m1:
363 for f in m1:
341 state = repo.dirstate[f]
364 state = repo.dirstate[f]
342 if state not in "nrm":
365 if state not in "nrm":
343 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
366 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
344 errors += 1
367 errors += 1
345 if errors:
368 if errors:
346 error = _(".hg/dirstate inconsistent with current parent's manifest")
369 error = _(".hg/dirstate inconsistent with current parent's manifest")
347 raise error.Abort(error)
370 raise error.Abort(error)
348
371
349 @command('debugcolor',
372 @command('debugcolor',
350 [('', 'style', None, _('show all configured styles'))],
373 [('', 'style', None, _('show all configured styles'))],
351 'hg debugcolor')
374 'hg debugcolor')
352 def debugcolor(ui, repo, **opts):
375 def debugcolor(ui, repo, **opts):
353 """show available color, effects or style"""
376 """show available color, effects or style"""
354 ui.write(('color mode: %s\n') % ui._colormode)
377 ui.write(('color mode: %s\n') % ui._colormode)
355 if opts.get('style'):
378 if opts.get('style'):
356 return _debugdisplaystyle(ui)
379 return _debugdisplaystyle(ui)
357 else:
380 else:
358 return _debugdisplaycolor(ui)
381 return _debugdisplaycolor(ui)
359
382
360 def _debugdisplaycolor(ui):
383 def _debugdisplaycolor(ui):
361 ui = ui.copy()
384 ui = ui.copy()
362 ui._styles.clear()
385 ui._styles.clear()
363 for effect in color._activeeffects(ui).keys():
386 for effect in color._activeeffects(ui).keys():
364 ui._styles[effect] = effect
387 ui._styles[effect] = effect
365 if ui._terminfoparams:
388 if ui._terminfoparams:
366 for k, v in ui.configitems('color'):
389 for k, v in ui.configitems('color'):
367 if k.startswith('color.'):
390 if k.startswith('color.'):
368 ui._styles[k] = k[6:]
391 ui._styles[k] = k[6:]
369 elif k.startswith('terminfo.'):
392 elif k.startswith('terminfo.'):
370 ui._styles[k] = k[9:]
393 ui._styles[k] = k[9:]
371 ui.write(_('available colors:\n'))
394 ui.write(_('available colors:\n'))
372 # sort label with a '_' after the other to group '_background' entry.
395 # sort label with a '_' after the other to group '_background' entry.
373 items = sorted(ui._styles.items(),
396 items = sorted(ui._styles.items(),
374 key=lambda i: ('_' in i[0], i[0], i[1]))
397 key=lambda i: ('_' in i[0], i[0], i[1]))
375 for colorname, label in items:
398 for colorname, label in items:
376 ui.write(('%s\n') % colorname, label=label)
399 ui.write(('%s\n') % colorname, label=label)
377
400
378 def _debugdisplaystyle(ui):
401 def _debugdisplaystyle(ui):
379 ui.write(_('available style:\n'))
402 ui.write(_('available style:\n'))
380 width = max(len(s) for s in ui._styles)
403 width = max(len(s) for s in ui._styles)
381 for label, effects in sorted(ui._styles.items()):
404 for label, effects in sorted(ui._styles.items()):
382 ui.write('%s' % label, label=label)
405 ui.write('%s' % label, label=label)
383 if effects:
406 if effects:
384 # 50
407 # 50
385 ui.write(': ')
408 ui.write(': ')
386 ui.write(' ' * (max(0, width - len(label))))
409 ui.write(' ' * (max(0, width - len(label))))
387 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
410 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
388 ui.write('\n')
411 ui.write('\n')
389
412
390 @command('debugcreatestreamclonebundle', [], 'FILE')
413 @command('debugcreatestreamclonebundle', [], 'FILE')
391 def debugcreatestreamclonebundle(ui, repo, fname):
414 def debugcreatestreamclonebundle(ui, repo, fname):
392 """create a stream clone bundle file
415 """create a stream clone bundle file
393
416
394 Stream bundles are special bundles that are essentially archives of
417 Stream bundles are special bundles that are essentially archives of
395 revlog files. They are commonly used for cloning very quickly.
418 revlog files. They are commonly used for cloning very quickly.
396 """
419 """
397 requirements, gen = streamclone.generatebundlev1(repo)
420 requirements, gen = streamclone.generatebundlev1(repo)
398 changegroup.writechunks(ui, gen, fname)
421 changegroup.writechunks(ui, gen, fname)
399
422
400 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
423 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
401
424
402 @command('debugdag',
425 @command('debugdag',
403 [('t', 'tags', None, _('use tags as labels')),
426 [('t', 'tags', None, _('use tags as labels')),
404 ('b', 'branches', None, _('annotate with branch names')),
427 ('b', 'branches', None, _('annotate with branch names')),
405 ('', 'dots', None, _('use dots for runs')),
428 ('', 'dots', None, _('use dots for runs')),
406 ('s', 'spaces', None, _('separate elements by spaces'))],
429 ('s', 'spaces', None, _('separate elements by spaces'))],
407 _('[OPTION]... [FILE [REV]...]'),
430 _('[OPTION]... [FILE [REV]...]'),
408 optionalrepo=True)
431 optionalrepo=True)
409 def debugdag(ui, repo, file_=None, *revs, **opts):
432 def debugdag(ui, repo, file_=None, *revs, **opts):
410 """format the changelog or an index DAG as a concise textual description
433 """format the changelog or an index DAG as a concise textual description
411
434
412 If you pass a revlog index, the revlog's DAG is emitted. If you list
435 If you pass a revlog index, the revlog's DAG is emitted. If you list
413 revision numbers, they get labeled in the output as rN.
436 revision numbers, they get labeled in the output as rN.
414
437
415 Otherwise, the changelog DAG of the current repo is emitted.
438 Otherwise, the changelog DAG of the current repo is emitted.
416 """
439 """
417 spaces = opts.get('spaces')
440 spaces = opts.get('spaces')
418 dots = opts.get('dots')
441 dots = opts.get('dots')
419 if file_:
442 if file_:
420 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
443 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
421 file_)
444 file_)
422 revs = set((int(r) for r in revs))
445 revs = set((int(r) for r in revs))
423 def events():
446 def events():
424 for r in rlog:
447 for r in rlog:
425 yield 'n', (r, list(p for p in rlog.parentrevs(r)
448 yield 'n', (r, list(p for p in rlog.parentrevs(r)
426 if p != -1))
449 if p != -1))
427 if r in revs:
450 if r in revs:
428 yield 'l', (r, "r%i" % r)
451 yield 'l', (r, "r%i" % r)
429 elif repo:
452 elif repo:
430 cl = repo.changelog
453 cl = repo.changelog
431 tags = opts.get('tags')
454 tags = opts.get('tags')
432 branches = opts.get('branches')
455 branches = opts.get('branches')
433 if tags:
456 if tags:
434 labels = {}
457 labels = {}
435 for l, n in repo.tags().items():
458 for l, n in repo.tags().items():
436 labels.setdefault(cl.rev(n), []).append(l)
459 labels.setdefault(cl.rev(n), []).append(l)
437 def events():
460 def events():
438 b = "default"
461 b = "default"
439 for r in cl:
462 for r in cl:
440 if branches:
463 if branches:
441 newb = cl.read(cl.node(r))[5]['branch']
464 newb = cl.read(cl.node(r))[5]['branch']
442 if newb != b:
465 if newb != b:
443 yield 'a', newb
466 yield 'a', newb
444 b = newb
467 b = newb
445 yield 'n', (r, list(p for p in cl.parentrevs(r)
468 yield 'n', (r, list(p for p in cl.parentrevs(r)
446 if p != -1))
469 if p != -1))
447 if tags:
470 if tags:
448 ls = labels.get(r)
471 ls = labels.get(r)
449 if ls:
472 if ls:
450 for l in ls:
473 for l in ls:
451 yield 'l', (r, l)
474 yield 'l', (r, l)
452 else:
475 else:
453 raise error.Abort(_('need repo for changelog dag'))
476 raise error.Abort(_('need repo for changelog dag'))
454
477
455 for line in dagparser.dagtextlines(events(),
478 for line in dagparser.dagtextlines(events(),
456 addspaces=spaces,
479 addspaces=spaces,
457 wraplabels=True,
480 wraplabels=True,
458 wrapannotations=True,
481 wrapannotations=True,
459 wrapnonlinear=dots,
482 wrapnonlinear=dots,
460 usedots=dots,
483 usedots=dots,
461 maxlinewidth=70):
484 maxlinewidth=70):
462 ui.write(line)
485 ui.write(line)
463 ui.write("\n")
486 ui.write("\n")
464
487
465 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
488 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
466 def debugdata(ui, repo, file_, rev=None, **opts):
489 def debugdata(ui, repo, file_, rev=None, **opts):
467 """dump the contents of a data file revision"""
490 """dump the contents of a data file revision"""
468 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
491 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
469 if rev is not None:
492 if rev is not None:
470 raise error.CommandError('debugdata', _('invalid arguments'))
493 raise error.CommandError('debugdata', _('invalid arguments'))
471 file_, rev = None, file_
494 file_, rev = None, file_
472 elif rev is None:
495 elif rev is None:
473 raise error.CommandError('debugdata', _('invalid arguments'))
496 raise error.CommandError('debugdata', _('invalid arguments'))
474 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
497 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
475 try:
498 try:
476 ui.write(r.revision(r.lookup(rev), raw=True))
499 ui.write(r.revision(r.lookup(rev), raw=True))
477 except KeyError:
500 except KeyError:
478 raise error.Abort(_('invalid revision identifier %s') % rev)
501 raise error.Abort(_('invalid revision identifier %s') % rev)
479
502
480 @command('debugdate',
503 @command('debugdate',
481 [('e', 'extended', None, _('try extended date formats'))],
504 [('e', 'extended', None, _('try extended date formats'))],
482 _('[-e] DATE [RANGE]'),
505 _('[-e] DATE [RANGE]'),
483 norepo=True, optionalrepo=True)
506 norepo=True, optionalrepo=True)
484 def debugdate(ui, date, range=None, **opts):
507 def debugdate(ui, date, range=None, **opts):
485 """parse and display a date"""
508 """parse and display a date"""
486 if opts["extended"]:
509 if opts["extended"]:
487 d = util.parsedate(date, util.extendeddateformats)
510 d = util.parsedate(date, util.extendeddateformats)
488 else:
511 else:
489 d = util.parsedate(date)
512 d = util.parsedate(date)
490 ui.write(("internal: %s %s\n") % d)
513 ui.write(("internal: %s %s\n") % d)
491 ui.write(("standard: %s\n") % util.datestr(d))
514 ui.write(("standard: %s\n") % util.datestr(d))
492 if range:
515 if range:
493 m = util.matchdate(range)
516 m = util.matchdate(range)
494 ui.write(("match: %s\n") % m(d[0]))
517 ui.write(("match: %s\n") % m(d[0]))
495
518
496 @command('debugdeltachain',
519 @command('debugdeltachain',
497 cmdutil.debugrevlogopts + cmdutil.formatteropts,
520 cmdutil.debugrevlogopts + cmdutil.formatteropts,
498 _('-c|-m|FILE'),
521 _('-c|-m|FILE'),
499 optionalrepo=True)
522 optionalrepo=True)
500 def debugdeltachain(ui, repo, file_=None, **opts):
523 def debugdeltachain(ui, repo, file_=None, **opts):
501 """dump information about delta chains in a revlog
524 """dump information about delta chains in a revlog
502
525
503 Output can be templatized. Available template keywords are:
526 Output can be templatized. Available template keywords are:
504
527
505 :``rev``: revision number
528 :``rev``: revision number
506 :``chainid``: delta chain identifier (numbered by unique base)
529 :``chainid``: delta chain identifier (numbered by unique base)
507 :``chainlen``: delta chain length to this revision
530 :``chainlen``: delta chain length to this revision
508 :``prevrev``: previous revision in delta chain
531 :``prevrev``: previous revision in delta chain
509 :``deltatype``: role of delta / how it was computed
532 :``deltatype``: role of delta / how it was computed
510 :``compsize``: compressed size of revision
533 :``compsize``: compressed size of revision
511 :``uncompsize``: uncompressed size of revision
534 :``uncompsize``: uncompressed size of revision
512 :``chainsize``: total size of compressed revisions in chain
535 :``chainsize``: total size of compressed revisions in chain
513 :``chainratio``: total chain size divided by uncompressed revision size
536 :``chainratio``: total chain size divided by uncompressed revision size
514 (new delta chains typically start at ratio 2.00)
537 (new delta chains typically start at ratio 2.00)
515 :``lindist``: linear distance from base revision in delta chain to end
538 :``lindist``: linear distance from base revision in delta chain to end
516 of this revision
539 of this revision
517 :``extradist``: total size of revisions not part of this delta chain from
540 :``extradist``: total size of revisions not part of this delta chain from
518 base of delta chain to end of this revision; a measurement
541 base of delta chain to end of this revision; a measurement
519 of how much extra data we need to read/seek across to read
542 of how much extra data we need to read/seek across to read
520 the delta chain for this revision
543 the delta chain for this revision
521 :``extraratio``: extradist divided by chainsize; another representation of
544 :``extraratio``: extradist divided by chainsize; another representation of
522 how much unrelated data is needed to load this delta chain
545 how much unrelated data is needed to load this delta chain
523 """
546 """
524 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
547 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
525 index = r.index
548 index = r.index
526 generaldelta = r.version & revlog.FLAG_GENERALDELTA
549 generaldelta = r.version & revlog.FLAG_GENERALDELTA
527
550
528 def revinfo(rev):
551 def revinfo(rev):
529 e = index[rev]
552 e = index[rev]
530 compsize = e[1]
553 compsize = e[1]
531 uncompsize = e[2]
554 uncompsize = e[2]
532 chainsize = 0
555 chainsize = 0
533
556
534 if generaldelta:
557 if generaldelta:
535 if e[3] == e[5]:
558 if e[3] == e[5]:
536 deltatype = 'p1'
559 deltatype = 'p1'
537 elif e[3] == e[6]:
560 elif e[3] == e[6]:
538 deltatype = 'p2'
561 deltatype = 'p2'
539 elif e[3] == rev - 1:
562 elif e[3] == rev - 1:
540 deltatype = 'prev'
563 deltatype = 'prev'
541 elif e[3] == rev:
564 elif e[3] == rev:
542 deltatype = 'base'
565 deltatype = 'base'
543 else:
566 else:
544 deltatype = 'other'
567 deltatype = 'other'
545 else:
568 else:
546 if e[3] == rev:
569 if e[3] == rev:
547 deltatype = 'base'
570 deltatype = 'base'
548 else:
571 else:
549 deltatype = 'prev'
572 deltatype = 'prev'
550
573
551 chain = r._deltachain(rev)[0]
574 chain = r._deltachain(rev)[0]
552 for iterrev in chain:
575 for iterrev in chain:
553 e = index[iterrev]
576 e = index[iterrev]
554 chainsize += e[1]
577 chainsize += e[1]
555
578
556 return compsize, uncompsize, deltatype, chain, chainsize
579 return compsize, uncompsize, deltatype, chain, chainsize
557
580
558 fm = ui.formatter('debugdeltachain', opts)
581 fm = ui.formatter('debugdeltachain', opts)
559
582
560 fm.plain(' rev chain# chainlen prev delta '
583 fm.plain(' rev chain# chainlen prev delta '
561 'size rawsize chainsize ratio lindist extradist '
584 'size rawsize chainsize ratio lindist extradist '
562 'extraratio\n')
585 'extraratio\n')
563
586
564 chainbases = {}
587 chainbases = {}
565 for rev in r:
588 for rev in r:
566 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
589 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
567 chainbase = chain[0]
590 chainbase = chain[0]
568 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
591 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
569 basestart = r.start(chainbase)
592 basestart = r.start(chainbase)
570 revstart = r.start(rev)
593 revstart = r.start(rev)
571 lineardist = revstart + comp - basestart
594 lineardist = revstart + comp - basestart
572 extradist = lineardist - chainsize
595 extradist = lineardist - chainsize
573 try:
596 try:
574 prevrev = chain[-2]
597 prevrev = chain[-2]
575 except IndexError:
598 except IndexError:
576 prevrev = -1
599 prevrev = -1
577
600
578 chainratio = float(chainsize) / float(uncomp)
601 chainratio = float(chainsize) / float(uncomp)
579 extraratio = float(extradist) / float(chainsize)
602 extraratio = float(extradist) / float(chainsize)
580
603
581 fm.startitem()
604 fm.startitem()
582 fm.write('rev chainid chainlen prevrev deltatype compsize '
605 fm.write('rev chainid chainlen prevrev deltatype compsize '
583 'uncompsize chainsize chainratio lindist extradist '
606 'uncompsize chainsize chainratio lindist extradist '
584 'extraratio',
607 'extraratio',
585 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
608 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
586 rev, chainid, len(chain), prevrev, deltatype, comp,
609 rev, chainid, len(chain), prevrev, deltatype, comp,
587 uncomp, chainsize, chainratio, lineardist, extradist,
610 uncomp, chainsize, chainratio, lineardist, extradist,
588 extraratio,
611 extraratio,
589 rev=rev, chainid=chainid, chainlen=len(chain),
612 rev=rev, chainid=chainid, chainlen=len(chain),
590 prevrev=prevrev, deltatype=deltatype, compsize=comp,
613 prevrev=prevrev, deltatype=deltatype, compsize=comp,
591 uncompsize=uncomp, chainsize=chainsize,
614 uncompsize=uncomp, chainsize=chainsize,
592 chainratio=chainratio, lindist=lineardist,
615 chainratio=chainratio, lindist=lineardist,
593 extradist=extradist, extraratio=extraratio)
616 extradist=extradist, extraratio=extraratio)
594
617
595 fm.end()
618 fm.end()
596
619
597 @command('debugdirstate|debugstate',
620 @command('debugdirstate|debugstate',
598 [('', 'nodates', None, _('do not display the saved mtime')),
621 [('', 'nodates', None, _('do not display the saved mtime')),
599 ('', 'datesort', None, _('sort by saved mtime'))],
622 ('', 'datesort', None, _('sort by saved mtime'))],
600 _('[OPTION]...'))
623 _('[OPTION]...'))
601 def debugstate(ui, repo, **opts):
624 def debugstate(ui, repo, **opts):
602 """show the contents of the current dirstate"""
625 """show the contents of the current dirstate"""
603
626
604 nodates = opts.get('nodates')
627 nodates = opts.get('nodates')
605 datesort = opts.get('datesort')
628 datesort = opts.get('datesort')
606
629
607 timestr = ""
630 timestr = ""
608 if datesort:
631 if datesort:
609 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
632 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
610 else:
633 else:
611 keyfunc = None # sort by filename
634 keyfunc = None # sort by filename
612 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
635 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
613 if ent[3] == -1:
636 if ent[3] == -1:
614 timestr = 'unset '
637 timestr = 'unset '
615 elif nodates:
638 elif nodates:
616 timestr = 'set '
639 timestr = 'set '
617 else:
640 else:
618 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
641 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
619 time.localtime(ent[3]))
642 time.localtime(ent[3]))
620 if ent[1] & 0o20000:
643 if ent[1] & 0o20000:
621 mode = 'lnk'
644 mode = 'lnk'
622 else:
645 else:
623 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
646 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
624 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
647 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
625 for f in repo.dirstate.copies():
648 for f in repo.dirstate.copies():
626 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
649 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
627
650
628 @command('debugdiscovery',
651 @command('debugdiscovery',
629 [('', 'old', None, _('use old-style discovery')),
652 [('', 'old', None, _('use old-style discovery')),
630 ('', 'nonheads', None,
653 ('', 'nonheads', None,
631 _('use old-style discovery with non-heads included')),
654 _('use old-style discovery with non-heads included')),
632 ] + cmdutil.remoteopts,
655 ] + cmdutil.remoteopts,
633 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
656 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
634 def debugdiscovery(ui, repo, remoteurl="default", **opts):
657 def debugdiscovery(ui, repo, remoteurl="default", **opts):
635 """runs the changeset discovery protocol in isolation"""
658 """runs the changeset discovery protocol in isolation"""
636 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
659 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
637 opts.get('branch'))
660 opts.get('branch'))
638 remote = hg.peer(repo, opts, remoteurl)
661 remote = hg.peer(repo, opts, remoteurl)
639 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
662 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
640
663
641 # make sure tests are repeatable
664 # make sure tests are repeatable
642 random.seed(12323)
665 random.seed(12323)
643
666
644 def doit(localheads, remoteheads, remote=remote):
667 def doit(localheads, remoteheads, remote=remote):
645 if opts.get('old'):
668 if opts.get('old'):
646 if localheads:
669 if localheads:
647 raise error.Abort('cannot use localheads with old style '
670 raise error.Abort('cannot use localheads with old style '
648 'discovery')
671 'discovery')
649 if not util.safehasattr(remote, 'branches'):
672 if not util.safehasattr(remote, 'branches'):
650 # enable in-client legacy support
673 # enable in-client legacy support
651 remote = localrepo.locallegacypeer(remote.local())
674 remote = localrepo.locallegacypeer(remote.local())
652 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
675 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
653 force=True)
676 force=True)
654 common = set(common)
677 common = set(common)
655 if not opts.get('nonheads'):
678 if not opts.get('nonheads'):
656 ui.write(("unpruned common: %s\n") %
679 ui.write(("unpruned common: %s\n") %
657 " ".join(sorted(short(n) for n in common)))
680 " ".join(sorted(short(n) for n in common)))
658 dag = dagutil.revlogdag(repo.changelog)
681 dag = dagutil.revlogdag(repo.changelog)
659 all = dag.ancestorset(dag.internalizeall(common))
682 all = dag.ancestorset(dag.internalizeall(common))
660 common = dag.externalizeall(dag.headsetofconnecteds(all))
683 common = dag.externalizeall(dag.headsetofconnecteds(all))
661 else:
684 else:
662 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
685 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
663 common = set(common)
686 common = set(common)
664 rheads = set(hds)
687 rheads = set(hds)
665 lheads = set(repo.heads())
688 lheads = set(repo.heads())
666 ui.write(("common heads: %s\n") %
689 ui.write(("common heads: %s\n") %
667 " ".join(sorted(short(n) for n in common)))
690 " ".join(sorted(short(n) for n in common)))
668 if lheads <= common:
691 if lheads <= common:
669 ui.write(("local is subset\n"))
692 ui.write(("local is subset\n"))
670 elif rheads <= common:
693 elif rheads <= common:
671 ui.write(("remote is subset\n"))
694 ui.write(("remote is subset\n"))
672
695
673 serverlogs = opts.get('serverlog')
696 serverlogs = opts.get('serverlog')
674 if serverlogs:
697 if serverlogs:
675 for filename in serverlogs:
698 for filename in serverlogs:
676 with open(filename, 'r') as logfile:
699 with open(filename, 'r') as logfile:
677 line = logfile.readline()
700 line = logfile.readline()
678 while line:
701 while line:
679 parts = line.strip().split(';')
702 parts = line.strip().split(';')
680 op = parts[1]
703 op = parts[1]
681 if op == 'cg':
704 if op == 'cg':
682 pass
705 pass
683 elif op == 'cgss':
706 elif op == 'cgss':
684 doit(parts[2].split(' '), parts[3].split(' '))
707 doit(parts[2].split(' '), parts[3].split(' '))
685 elif op == 'unb':
708 elif op == 'unb':
686 doit(parts[3].split(' '), parts[2].split(' '))
709 doit(parts[3].split(' '), parts[2].split(' '))
687 line = logfile.readline()
710 line = logfile.readline()
688 else:
711 else:
689 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
712 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
690 opts.get('remote_head'))
713 opts.get('remote_head'))
691 localrevs = opts.get('local_head')
714 localrevs = opts.get('local_head')
692 doit(localrevs, remoterevs)
715 doit(localrevs, remoterevs)
693
716
694 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
717 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
695 def debugextensions(ui, **opts):
718 def debugextensions(ui, **opts):
696 '''show information about active extensions'''
719 '''show information about active extensions'''
697 exts = extensions.extensions(ui)
720 exts = extensions.extensions(ui)
698 hgver = util.version()
721 hgver = util.version()
699 fm = ui.formatter('debugextensions', opts)
722 fm = ui.formatter('debugextensions', opts)
700 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
723 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
701 isinternal = extensions.ismoduleinternal(extmod)
724 isinternal = extensions.ismoduleinternal(extmod)
702 extsource = pycompat.fsencode(extmod.__file__)
725 extsource = pycompat.fsencode(extmod.__file__)
703 if isinternal:
726 if isinternal:
704 exttestedwith = [] # never expose magic string to users
727 exttestedwith = [] # never expose magic string to users
705 else:
728 else:
706 exttestedwith = getattr(extmod, 'testedwith', '').split()
729 exttestedwith = getattr(extmod, 'testedwith', '').split()
707 extbuglink = getattr(extmod, 'buglink', None)
730 extbuglink = getattr(extmod, 'buglink', None)
708
731
709 fm.startitem()
732 fm.startitem()
710
733
711 if ui.quiet or ui.verbose:
734 if ui.quiet or ui.verbose:
712 fm.write('name', '%s\n', extname)
735 fm.write('name', '%s\n', extname)
713 else:
736 else:
714 fm.write('name', '%s', extname)
737 fm.write('name', '%s', extname)
715 if isinternal or hgver in exttestedwith:
738 if isinternal or hgver in exttestedwith:
716 fm.plain('\n')
739 fm.plain('\n')
717 elif not exttestedwith:
740 elif not exttestedwith:
718 fm.plain(_(' (untested!)\n'))
741 fm.plain(_(' (untested!)\n'))
719 else:
742 else:
720 lasttestedversion = exttestedwith[-1]
743 lasttestedversion = exttestedwith[-1]
721 fm.plain(' (%s!)\n' % lasttestedversion)
744 fm.plain(' (%s!)\n' % lasttestedversion)
722
745
723 fm.condwrite(ui.verbose and extsource, 'source',
746 fm.condwrite(ui.verbose and extsource, 'source',
724 _(' location: %s\n'), extsource or "")
747 _(' location: %s\n'), extsource or "")
725
748
726 if ui.verbose:
749 if ui.verbose:
727 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
750 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
728 fm.data(bundled=isinternal)
751 fm.data(bundled=isinternal)
729
752
730 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
753 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
731 _(' tested with: %s\n'),
754 _(' tested with: %s\n'),
732 fm.formatlist(exttestedwith, name='ver'))
755 fm.formatlist(exttestedwith, name='ver'))
733
756
734 fm.condwrite(ui.verbose and extbuglink, 'buglink',
757 fm.condwrite(ui.verbose and extbuglink, 'buglink',
735 _(' bug reporting: %s\n'), extbuglink or "")
758 _(' bug reporting: %s\n'), extbuglink or "")
736
759
737 fm.end()
760 fm.end()
738
761
739 @command('debugfileset',
762 @command('debugfileset',
740 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
763 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
741 _('[-r REV] FILESPEC'))
764 _('[-r REV] FILESPEC'))
742 def debugfileset(ui, repo, expr, **opts):
765 def debugfileset(ui, repo, expr, **opts):
743 '''parse and apply a fileset specification'''
766 '''parse and apply a fileset specification'''
744 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
767 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
745 if ui.verbose:
768 if ui.verbose:
746 tree = fileset.parse(expr)
769 tree = fileset.parse(expr)
747 ui.note(fileset.prettyformat(tree), "\n")
770 ui.note(fileset.prettyformat(tree), "\n")
748
771
749 for f in ctx.getfileset(expr):
772 for f in ctx.getfileset(expr):
750 ui.write("%s\n" % f)
773 ui.write("%s\n" % f)
751
774
752 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
775 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
753 def debugfsinfo(ui, path="."):
776 def debugfsinfo(ui, path="."):
754 """show information detected about current filesystem"""
777 """show information detected about current filesystem"""
755 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
778 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
756 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
779 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
757 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
780 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
758 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
781 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
759 casesensitive = '(unknown)'
782 casesensitive = '(unknown)'
760 try:
783 try:
761 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
784 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
762 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
785 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
763 except OSError:
786 except OSError:
764 pass
787 pass
765 ui.write(('case-sensitive: %s\n') % casesensitive)
788 ui.write(('case-sensitive: %s\n') % casesensitive)
766
789
767 @command('debuggetbundle',
790 @command('debuggetbundle',
768 [('H', 'head', [], _('id of head node'), _('ID')),
791 [('H', 'head', [], _('id of head node'), _('ID')),
769 ('C', 'common', [], _('id of common node'), _('ID')),
792 ('C', 'common', [], _('id of common node'), _('ID')),
770 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
793 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
771 _('REPO FILE [-H|-C ID]...'),
794 _('REPO FILE [-H|-C ID]...'),
772 norepo=True)
795 norepo=True)
773 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
796 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
774 """retrieves a bundle from a repo
797 """retrieves a bundle from a repo
775
798
776 Every ID must be a full-length hex node id string. Saves the bundle to the
799 Every ID must be a full-length hex node id string. Saves the bundle to the
777 given file.
800 given file.
778 """
801 """
779 repo = hg.peer(ui, opts, repopath)
802 repo = hg.peer(ui, opts, repopath)
780 if not repo.capable('getbundle'):
803 if not repo.capable('getbundle'):
781 raise error.Abort("getbundle() not supported by target repository")
804 raise error.Abort("getbundle() not supported by target repository")
782 args = {}
805 args = {}
783 if common:
806 if common:
784 args['common'] = [bin(s) for s in common]
807 args['common'] = [bin(s) for s in common]
785 if head:
808 if head:
786 args['heads'] = [bin(s) for s in head]
809 args['heads'] = [bin(s) for s in head]
787 # TODO: get desired bundlecaps from command line.
810 # TODO: get desired bundlecaps from command line.
788 args['bundlecaps'] = None
811 args['bundlecaps'] = None
789 bundle = repo.getbundle('debug', **args)
812 bundle = repo.getbundle('debug', **args)
790
813
791 bundletype = opts.get('type', 'bzip2').lower()
814 bundletype = opts.get('type', 'bzip2').lower()
792 btypes = {'none': 'HG10UN',
815 btypes = {'none': 'HG10UN',
793 'bzip2': 'HG10BZ',
816 'bzip2': 'HG10BZ',
794 'gzip': 'HG10GZ',
817 'gzip': 'HG10GZ',
795 'bundle2': 'HG20'}
818 'bundle2': 'HG20'}
796 bundletype = btypes.get(bundletype)
819 bundletype = btypes.get(bundletype)
797 if bundletype not in bundle2.bundletypes:
820 if bundletype not in bundle2.bundletypes:
798 raise error.Abort(_('unknown bundle type specified with --type'))
821 raise error.Abort(_('unknown bundle type specified with --type'))
799 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
822 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
800
823
801 @command('debugignore', [], '[FILE]')
824 @command('debugignore', [], '[FILE]')
802 def debugignore(ui, repo, *files, **opts):
825 def debugignore(ui, repo, *files, **opts):
803 """display the combined ignore pattern and information about ignored files
826 """display the combined ignore pattern and information about ignored files
804
827
805 With no argument display the combined ignore pattern.
828 With no argument display the combined ignore pattern.
806
829
807 Given space separated file names, shows if the given file is ignored and
830 Given space separated file names, shows if the given file is ignored and
808 if so, show the ignore rule (file and line number) that matched it.
831 if so, show the ignore rule (file and line number) that matched it.
809 """
832 """
810 ignore = repo.dirstate._ignore
833 ignore = repo.dirstate._ignore
811 if not files:
834 if not files:
812 # Show all the patterns
835 # Show all the patterns
813 ui.write("%s\n" % repr(ignore))
836 ui.write("%s\n" % repr(ignore))
814 else:
837 else:
815 for f in files:
838 for f in files:
816 nf = util.normpath(f)
839 nf = util.normpath(f)
817 ignored = None
840 ignored = None
818 ignoredata = None
841 ignoredata = None
819 if nf != '.':
842 if nf != '.':
820 if ignore(nf):
843 if ignore(nf):
821 ignored = nf
844 ignored = nf
822 ignoredata = repo.dirstate._ignorefileandline(nf)
845 ignoredata = repo.dirstate._ignorefileandline(nf)
823 else:
846 else:
824 for p in util.finddirs(nf):
847 for p in util.finddirs(nf):
825 if ignore(p):
848 if ignore(p):
826 ignored = p
849 ignored = p
827 ignoredata = repo.dirstate._ignorefileandline(p)
850 ignoredata = repo.dirstate._ignorefileandline(p)
828 break
851 break
829 if ignored:
852 if ignored:
830 if ignored == nf:
853 if ignored == nf:
831 ui.write(_("%s is ignored\n") % f)
854 ui.write(_("%s is ignored\n") % f)
832 else:
855 else:
833 ui.write(_("%s is ignored because of "
856 ui.write(_("%s is ignored because of "
834 "containing folder %s\n")
857 "containing folder %s\n")
835 % (f, ignored))
858 % (f, ignored))
836 ignorefile, lineno, line = ignoredata
859 ignorefile, lineno, line = ignoredata
837 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
860 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
838 % (ignorefile, lineno, line))
861 % (ignorefile, lineno, line))
839 else:
862 else:
840 ui.write(_("%s is not ignored\n") % f)
863 ui.write(_("%s is not ignored\n") % f)
841
864
842 @command('debugindex', cmdutil.debugrevlogopts +
865 @command('debugindex', cmdutil.debugrevlogopts +
843 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
866 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
844 _('[-f FORMAT] -c|-m|FILE'),
867 _('[-f FORMAT] -c|-m|FILE'),
845 optionalrepo=True)
868 optionalrepo=True)
846 def debugindex(ui, repo, file_=None, **opts):
869 def debugindex(ui, repo, file_=None, **opts):
847 """dump the contents of an index file"""
870 """dump the contents of an index file"""
848 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
871 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
849 format = opts.get('format', 0)
872 format = opts.get('format', 0)
850 if format not in (0, 1):
873 if format not in (0, 1):
851 raise error.Abort(_("unknown format %d") % format)
874 raise error.Abort(_("unknown format %d") % format)
852
875
853 generaldelta = r.version & revlog.FLAG_GENERALDELTA
876 generaldelta = r.version & revlog.FLAG_GENERALDELTA
854 if generaldelta:
877 if generaldelta:
855 basehdr = ' delta'
878 basehdr = ' delta'
856 else:
879 else:
857 basehdr = ' base'
880 basehdr = ' base'
858
881
859 if ui.debugflag:
882 if ui.debugflag:
860 shortfn = hex
883 shortfn = hex
861 else:
884 else:
862 shortfn = short
885 shortfn = short
863
886
864 # There might not be anything in r, so have a sane default
887 # There might not be anything in r, so have a sane default
865 idlen = 12
888 idlen = 12
866 for i in r:
889 for i in r:
867 idlen = len(shortfn(r.node(i)))
890 idlen = len(shortfn(r.node(i)))
868 break
891 break
869
892
870 if format == 0:
893 if format == 0:
871 ui.write((" rev offset length " + basehdr + " linkrev"
894 ui.write((" rev offset length " + basehdr + " linkrev"
872 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
895 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
873 elif format == 1:
896 elif format == 1:
874 ui.write((" rev flag offset length"
897 ui.write((" rev flag offset length"
875 " size " + basehdr + " link p1 p2"
898 " size " + basehdr + " link p1 p2"
876 " %s\n") % "nodeid".rjust(idlen))
899 " %s\n") % "nodeid".rjust(idlen))
877
900
878 for i in r:
901 for i in r:
879 node = r.node(i)
902 node = r.node(i)
880 if generaldelta:
903 if generaldelta:
881 base = r.deltaparent(i)
904 base = r.deltaparent(i)
882 else:
905 else:
883 base = r.chainbase(i)
906 base = r.chainbase(i)
884 if format == 0:
907 if format == 0:
885 try:
908 try:
886 pp = r.parents(node)
909 pp = r.parents(node)
887 except Exception:
910 except Exception:
888 pp = [nullid, nullid]
911 pp = [nullid, nullid]
889 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
912 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
890 i, r.start(i), r.length(i), base, r.linkrev(i),
913 i, r.start(i), r.length(i), base, r.linkrev(i),
891 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
914 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
892 elif format == 1:
915 elif format == 1:
893 pr = r.parentrevs(i)
916 pr = r.parentrevs(i)
894 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
917 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
895 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
918 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
896 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
919 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
897
920
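# Illustrative sketch only: roughly what the format-0 output above looks like
# for a hypothetical three-revision changelog (offsets, lengths and node
# hashes are invented; real values depend on the repository):
#
#   $ hg debugindex -c
#      rev    offset  length   base linkrev nodeid       p1           p2
#        0         0      61      0       0 1ea73414a91b 000000000000 000000000000
#        1        61      65      1       1 66f7d451a68b 1ea73414a91b 000000000000
#        2       126      71      2       2 2dc09a01254d 66f7d451a68b 000000000000
#
# With -f 1 the header gains the flag, size and numeric parent-rev columns
# written by the format == 1 branch above.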
898 @command('debugindexdot', cmdutil.debugrevlogopts,
921 @command('debugindexdot', cmdutil.debugrevlogopts,
899 _('-c|-m|FILE'), optionalrepo=True)
922 _('-c|-m|FILE'), optionalrepo=True)
900 def debugindexdot(ui, repo, file_=None, **opts):
923 def debugindexdot(ui, repo, file_=None, **opts):
901 """dump an index DAG as a graphviz dot file"""
924 """dump an index DAG as a graphviz dot file"""
902 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
925 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
903 ui.write(("digraph G {\n"))
926 ui.write(("digraph G {\n"))
904 for i in r:
927 for i in r:
905 node = r.node(i)
928 node = r.node(i)
906 pp = r.parents(node)
929 pp = r.parents(node)
907 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
930 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
908 if pp[1] != nullid:
931 if pp[1] != nullid:
909 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
932 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
910 ui.write("}\n")
933 ui.write("}\n")
911
934
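# Sketch of the graphviz output produced above for a small hypothetical
# history: rev 0 is the root, revs 1 and 2 are its children, and rev 3
# merges them (the -1 edge comes from r.rev(nullid)):
#
#   $ hg debugindexdot -c
#   digraph G {
#   	-1 -> 0
#   	0 -> 1
#   	0 -> 2
#   	1 -> 3
#   	2 -> 3
#   }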
912 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
935 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
913 def debuginstall(ui, **opts):
936 def debuginstall(ui, **opts):
914 '''test Mercurial installation
937 '''test Mercurial installation
915
938
916 Returns 0 on success.
939 Returns 0 on success.
917 '''
940 '''
918
941
919 def writetemp(contents):
942 def writetemp(contents):
920 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
943 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
921 f = os.fdopen(fd, pycompat.sysstr("wb"))
944 f = os.fdopen(fd, pycompat.sysstr("wb"))
922 f.write(contents)
945 f.write(contents)
923 f.close()
946 f.close()
924 return name
947 return name
925
948
926 problems = 0
949 problems = 0
927
950
928 fm = ui.formatter('debuginstall', opts)
951 fm = ui.formatter('debuginstall', opts)
929 fm.startitem()
952 fm.startitem()
930
953
931 # encoding
954 # encoding
932 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
955 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
933 err = None
956 err = None
934 try:
957 try:
935 encoding.fromlocal("test")
958 encoding.fromlocal("test")
936 except error.Abort as inst:
959 except error.Abort as inst:
937 err = inst
960 err = inst
938 problems += 1
961 problems += 1
939 fm.condwrite(err, 'encodingerror', _(" %s\n"
962 fm.condwrite(err, 'encodingerror', _(" %s\n"
940 " (check that your locale is properly set)\n"), err)
963 " (check that your locale is properly set)\n"), err)
941
964
942 # Python
965 # Python
943 fm.write('pythonexe', _("checking Python executable (%s)\n"),
966 fm.write('pythonexe', _("checking Python executable (%s)\n"),
944 pycompat.sysexecutable)
967 pycompat.sysexecutable)
945 fm.write('pythonver', _("checking Python version (%s)\n"),
968 fm.write('pythonver', _("checking Python version (%s)\n"),
946 ("%d.%d.%d" % sys.version_info[:3]))
969 ("%d.%d.%d" % sys.version_info[:3]))
947 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
970 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
948 os.path.dirname(pycompat.fsencode(os.__file__)))
971 os.path.dirname(pycompat.fsencode(os.__file__)))
949
972
950 security = set(sslutil.supportedprotocols)
973 security = set(sslutil.supportedprotocols)
951 if sslutil.hassni:
974 if sslutil.hassni:
952 security.add('sni')
975 security.add('sni')
953
976
954 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
977 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
955 fm.formatlist(sorted(security), name='protocol',
978 fm.formatlist(sorted(security), name='protocol',
956 fmt='%s', sep=','))
979 fmt='%s', sep=','))
957
980
958 # These are warnings, not errors. So don't increment problem count. This
981 # These are warnings, not errors. So don't increment problem count. This
959 # may change in the future.
982 # may change in the future.
960 if 'tls1.2' not in security:
983 if 'tls1.2' not in security:
961 fm.plain(_(' TLS 1.2 not supported by Python install; '
984 fm.plain(_(' TLS 1.2 not supported by Python install; '
962 'network connections lack modern security\n'))
985 'network connections lack modern security\n'))
963 if 'sni' not in security:
986 if 'sni' not in security:
964 fm.plain(_(' SNI not supported by Python install; may have '
987 fm.plain(_(' SNI not supported by Python install; may have '
965 'connectivity issues with some servers\n'))
988 'connectivity issues with some servers\n'))
966
989
967 # TODO print CA cert info
990 # TODO print CA cert info
968
991
969 # hg version
992 # hg version
970 hgver = util.version()
993 hgver = util.version()
971 fm.write('hgver', _("checking Mercurial version (%s)\n"),
994 fm.write('hgver', _("checking Mercurial version (%s)\n"),
972 hgver.split('+')[0])
995 hgver.split('+')[0])
973 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
996 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
974 '+'.join(hgver.split('+')[1:]))
997 '+'.join(hgver.split('+')[1:]))
975
998
976 # compiled modules
999 # compiled modules
977 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1000 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
978 policy.policy)
1001 policy.policy)
979 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1002 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
980 os.path.dirname(pycompat.fsencode(__file__)))
1003 os.path.dirname(pycompat.fsencode(__file__)))
981
1004
982 if policy.policy in ('c', 'allow'):
1005 if policy.policy in ('c', 'allow'):
983 err = None
1006 err = None
984 try:
1007 try:
985 from .cext import (
1008 from .cext import (
986 base85,
1009 base85,
987 bdiff,
1010 bdiff,
988 mpatch,
1011 mpatch,
989 osutil,
1012 osutil,
990 )
1013 )
991 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1014 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
992 except Exception as inst:
1015 except Exception as inst:
993 err = inst
1016 err = inst
994 problems += 1
1017 problems += 1
995 fm.condwrite(err, 'extensionserror', " %s\n", err)
1018 fm.condwrite(err, 'extensionserror', " %s\n", err)
996
1019
997 compengines = util.compengines._engines.values()
1020 compengines = util.compengines._engines.values()
998 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1021 fm.write('compengines', _('checking registered compression engines (%s)\n'),
999 fm.formatlist(sorted(e.name() for e in compengines),
1022 fm.formatlist(sorted(e.name() for e in compengines),
1000 name='compengine', fmt='%s', sep=', '))
1023 name='compengine', fmt='%s', sep=', '))
1001 fm.write('compenginesavail', _('checking available compression engines '
1024 fm.write('compenginesavail', _('checking available compression engines '
1002 '(%s)\n'),
1025 '(%s)\n'),
1003 fm.formatlist(sorted(e.name() for e in compengines
1026 fm.formatlist(sorted(e.name() for e in compengines
1004 if e.available()),
1027 if e.available()),
1005 name='compengine', fmt='%s', sep=', '))
1028 name='compengine', fmt='%s', sep=', '))
1006 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1029 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1007 fm.write('compenginesserver', _('checking available compression engines '
1030 fm.write('compenginesserver', _('checking available compression engines '
1008 'for wire protocol (%s)\n'),
1031 'for wire protocol (%s)\n'),
1009 fm.formatlist([e.name() for e in wirecompengines
1032 fm.formatlist([e.name() for e in wirecompengines
1010 if e.wireprotosupport()],
1033 if e.wireprotosupport()],
1011 name='compengine', fmt='%s', sep=', '))
1034 name='compengine', fmt='%s', sep=', '))
1012
1035
1013 # templates
1036 # templates
1014 p = templater.templatepaths()
1037 p = templater.templatepaths()
1015 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1038 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1016 fm.condwrite(not p, '', _(" no template directories found\n"))
1039 fm.condwrite(not p, '', _(" no template directories found\n"))
1017 if p:
1040 if p:
1018 m = templater.templatepath("map-cmdline.default")
1041 m = templater.templatepath("map-cmdline.default")
1019 if m:
1042 if m:
1020 # template found, check if it is working
1043 # template found, check if it is working
1021 err = None
1044 err = None
1022 try:
1045 try:
1023 templater.templater.frommapfile(m)
1046 templater.templater.frommapfile(m)
1024 except Exception as inst:
1047 except Exception as inst:
1025 err = inst
1048 err = inst
1026 p = None
1049 p = None
1027 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1050 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1028 else:
1051 else:
1029 p = None
1052 p = None
1030 fm.condwrite(p, 'defaulttemplate',
1053 fm.condwrite(p, 'defaulttemplate',
1031 _("checking default template (%s)\n"), m)
1054 _("checking default template (%s)\n"), m)
1032 fm.condwrite(not m, 'defaulttemplatenotfound',
1055 fm.condwrite(not m, 'defaulttemplatenotfound',
1033 _(" template '%s' not found\n"), "default")
1056 _(" template '%s' not found\n"), "default")
1034 if not p:
1057 if not p:
1035 problems += 1
1058 problems += 1
1036 fm.condwrite(not p, '',
1059 fm.condwrite(not p, '',
1037 _(" (templates seem to have been installed incorrectly)\n"))
1060 _(" (templates seem to have been installed incorrectly)\n"))
1038
1061
1039 # editor
1062 # editor
1040 editor = ui.geteditor()
1063 editor = ui.geteditor()
1041 editor = util.expandpath(editor)
1064 editor = util.expandpath(editor)
1042 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1065 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1043 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1066 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1044 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1067 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1045 _(" No commit editor set and can't find %s in PATH\n"
1068 _(" No commit editor set and can't find %s in PATH\n"
1046 " (specify a commit editor in your configuration"
1069 " (specify a commit editor in your configuration"
1047 " file)\n"), not cmdpath and editor == 'vi' and editor)
1070 " file)\n"), not cmdpath and editor == 'vi' and editor)
1048 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1071 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1049 _(" Can't find editor '%s' in PATH\n"
1072 _(" Can't find editor '%s' in PATH\n"
1050 " (specify a commit editor in your configuration"
1073 " (specify a commit editor in your configuration"
1051 " file)\n"), not cmdpath and editor)
1074 " file)\n"), not cmdpath and editor)
1052 if not cmdpath and editor != 'vi':
1075 if not cmdpath and editor != 'vi':
1053 problems += 1
1076 problems += 1
1054
1077
1055 # check username
1078 # check username
1056 username = None
1079 username = None
1057 err = None
1080 err = None
1058 try:
1081 try:
1059 username = ui.username()
1082 username = ui.username()
1060 except error.Abort as e:
1083 except error.Abort as e:
1061 err = e
1084 err = e
1062 problems += 1
1085 problems += 1
1063
1086
1064 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1087 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1065 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1088 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1066 " (specify a username in your configuration file)\n"), err)
1089 " (specify a username in your configuration file)\n"), err)
1067
1090
1068 fm.condwrite(not problems, '',
1091 fm.condwrite(not problems, '',
1069 _("no problems detected\n"))
1092 _("no problems detected\n"))
1070 if not problems:
1093 if not problems:
1071 fm.data(problems=problems)
1094 fm.data(problems=problems)
1072 fm.condwrite(problems, 'problems',
1095 fm.condwrite(problems, 'problems',
1073 _("%d problems detected,"
1096 _("%d problems detected,"
1074 " please check your install!\n"), problems)
1097 " please check your install!\n"), problems)
1075 fm.end()
1098 fm.end()
1076
1099
1077 return problems
1100 return problems
1078
1101
1079 @command('debugknown', [], _('REPO ID...'), norepo=True)
1102 @command('debugknown', [], _('REPO ID...'), norepo=True)
1080 def debugknown(ui, repopath, *ids, **opts):
1103 def debugknown(ui, repopath, *ids, **opts):
1081 """test whether node ids are known to a repo
1104 """test whether node ids are known to a repo
1082
1105
1083 Every ID must be a full-length hex node id string. Returns a list of 0s
1106 Every ID must be a full-length hex node id string. Returns a list of 0s
1084 and 1s indicating unknown/known.
1107 and 1s indicating unknown/known.
1085 """
1108 """
1086 repo = hg.peer(ui, opts, repopath)
1109 repo = hg.peer(ui, opts, repopath)
1087 if not repo.capable('known'):
1110 if not repo.capable('known'):
1088 raise error.Abort("known() not supported by target repository")
1111 raise error.Abort("known() not supported by target repository")
1089 flags = repo.known([bin(s) for s in ids])
1112 flags = repo.known([bin(s) for s in ids])
1090 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1113 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1091
1114
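# Illustrative usage of debugknown; the node ids below are placeholders, and
# any full-length hex string is accepted whether or not the peer knows it:
#
#   $ hg debugknown ../peer-repo \
#   >     5c095ad7e90f871700f02dd1fa5012cb4498a2d4 \
#   >     6625a516847449b6f0fa3737b9ba56e9a0f64be5
#   10
#
# Each digit maps to one id, "1" for known and "0" for unknown, matching the
# join over flags above.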
1092 @command('debuglabelcomplete', [], _('LABEL...'))
1115 @command('debuglabelcomplete', [], _('LABEL...'))
1093 def debuglabelcomplete(ui, repo, *args):
1116 def debuglabelcomplete(ui, repo, *args):
1094 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1117 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1095 debugnamecomplete(ui, repo, *args)
1118 debugnamecomplete(ui, repo, *args)
1096
1119
1097 @command('debuglocks',
1120 @command('debuglocks',
1098 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1121 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1099 ('W', 'force-wlock', None,
1122 ('W', 'force-wlock', None,
1100 _('free the working state lock (DANGEROUS)'))],
1123 _('free the working state lock (DANGEROUS)'))],
1101 _('[OPTION]...'))
1124 _('[OPTION]...'))
1102 def debuglocks(ui, repo, **opts):
1125 def debuglocks(ui, repo, **opts):
1103 """show or modify state of locks
1126 """show or modify state of locks
1104
1127
1105 By default, this command will show which locks are held. This
1128 By default, this command will show which locks are held. This
1106 includes the user and process holding the lock, the amount of time
1129 includes the user and process holding the lock, the amount of time
1107 the lock has been held, and the machine name where the process is
1130 the lock has been held, and the machine name where the process is
1108 running if it's not local.
1131 running if it's not local.
1109
1132
1110 Locks protect the integrity of Mercurial's data, so they should be
1133 Locks protect the integrity of Mercurial's data, so they should be
1111 treated with care. System crashes or other interruptions may cause
1134 treated with care. System crashes or other interruptions may cause
1112 locks to not be properly released, though Mercurial will usually
1135 locks to not be properly released, though Mercurial will usually
1113 detect and remove such stale locks automatically.
1136 detect and remove such stale locks automatically.
1114
1137
1115 However, detecting stale locks may not always be possible (for
1138 However, detecting stale locks may not always be possible (for
1116 instance, on a shared filesystem). Removing locks may also be
1139 instance, on a shared filesystem). Removing locks may also be
1117 blocked by filesystem permissions.
1140 blocked by filesystem permissions.
1118
1141
1119 Returns 0 if no locks are held.
1142 Returns 0 if no locks are held.
1120
1143
1121 """
1144 """
1122
1145
1123 if opts.get('force_lock'):
1146 if opts.get('force_lock'):
1124 repo.svfs.unlink('lock')
1147 repo.svfs.unlink('lock')
1125 if opts.get('force_wlock'):
1148 if opts.get('force_wlock'):
1126 repo.vfs.unlink('wlock')
1149 repo.vfs.unlink('wlock')
1127 if opts.get('force_lock') or opts.get('force_wlock'):
1150 if opts.get('force_lock') or opts.get('force_wlock'):
1128 return 0
1151 return 0
1129
1152
1130 now = time.time()
1153 now = time.time()
1131 held = 0
1154 held = 0
1132
1155
1133 def report(vfs, name, method):
1156 def report(vfs, name, method):
1134 # this causes stale locks to get reaped for more accurate reporting
1157 # this causes stale locks to get reaped for more accurate reporting
1135 try:
1158 try:
1136 l = method(False)
1159 l = method(False)
1137 except error.LockHeld:
1160 except error.LockHeld:
1138 l = None
1161 l = None
1139
1162
1140 if l:
1163 if l:
1141 l.release()
1164 l.release()
1142 else:
1165 else:
1143 try:
1166 try:
1144 stat = vfs.lstat(name)
1167 stat = vfs.lstat(name)
1145 age = now - stat.st_mtime
1168 age = now - stat.st_mtime
1146 user = util.username(stat.st_uid)
1169 user = util.username(stat.st_uid)
1147 locker = vfs.readlock(name)
1170 locker = vfs.readlock(name)
1148 if ":" in locker:
1171 if ":" in locker:
1149 host, pid = locker.split(':')
1172 host, pid = locker.split(':')
1150 if host == socket.gethostname():
1173 if host == socket.gethostname():
1151 locker = 'user %s, process %s' % (user, pid)
1174 locker = 'user %s, process %s' % (user, pid)
1152 else:
1175 else:
1153 locker = 'user %s, process %s, host %s' \
1176 locker = 'user %s, process %s, host %s' \
1154 % (user, pid, host)
1177 % (user, pid, host)
1155 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1178 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1156 return 1
1179 return 1
1157 except OSError as e:
1180 except OSError as e:
1158 if e.errno != errno.ENOENT:
1181 if e.errno != errno.ENOENT:
1159 raise
1182 raise
1160
1183
1161 ui.write(("%-6s free\n") % (name + ":"))
1184 ui.write(("%-6s free\n") % (name + ":"))
1162 return 0
1185 return 0
1163
1186
1164 held += report(repo.svfs, "lock", repo.lock)
1187 held += report(repo.svfs, "lock", repo.lock)
1165 held += report(repo.vfs, "wlock", repo.wlock)
1188 held += report(repo.vfs, "wlock", repo.wlock)
1166
1189
1167 return held
1190 return held
1168
1191
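# Sketch of typical debuglocks output, following the report() format strings
# above (user, pid and age are placeholders):
#
#   $ hg debuglocks
#   lock:  free
#   wlock: user alice, process 4217 (12s)
#
# The return value is the number of locks still held, so the command would
# exit with status 1 in this example.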
1169 @command('debugmergestate', [], '')
1192 @command('debugmergestate', [], '')
1170 def debugmergestate(ui, repo, *args):
1193 def debugmergestate(ui, repo, *args):
1171 """print merge state
1194 """print merge state
1172
1195
1173 Use --verbose to print out information about whether v1 or v2 merge state
1196 Use --verbose to print out information about whether v1 or v2 merge state
1174 was chosen."""
1197 was chosen."""
1175 def _hashornull(h):
1198 def _hashornull(h):
1176 if h == nullhex:
1199 if h == nullhex:
1177 return 'null'
1200 return 'null'
1178 else:
1201 else:
1179 return h
1202 return h
1180
1203
1181 def printrecords(version):
1204 def printrecords(version):
1182 ui.write(('* version %s records\n') % version)
1205 ui.write(('* version %s records\n') % version)
1183 if version == 1:
1206 if version == 1:
1184 records = v1records
1207 records = v1records
1185 else:
1208 else:
1186 records = v2records
1209 records = v2records
1187
1210
1188 for rtype, record in records:
1211 for rtype, record in records:
1189 # pretty print some record types
1212 # pretty print some record types
1190 if rtype == 'L':
1213 if rtype == 'L':
1191 ui.write(('local: %s\n') % record)
1214 ui.write(('local: %s\n') % record)
1192 elif rtype == 'O':
1215 elif rtype == 'O':
1193 ui.write(('other: %s\n') % record)
1216 ui.write(('other: %s\n') % record)
1194 elif rtype == 'm':
1217 elif rtype == 'm':
1195 driver, mdstate = record.split('\0', 1)
1218 driver, mdstate = record.split('\0', 1)
1196 ui.write(('merge driver: %s (state "%s")\n')
1219 ui.write(('merge driver: %s (state "%s")\n')
1197 % (driver, mdstate))
1220 % (driver, mdstate))
1198 elif rtype in 'FDC':
1221 elif rtype in 'FDC':
1199 r = record.split('\0')
1222 r = record.split('\0')
1200 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1223 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1201 if version == 1:
1224 if version == 1:
1202 onode = 'not stored in v1 format'
1225 onode = 'not stored in v1 format'
1203 flags = r[7]
1226 flags = r[7]
1204 else:
1227 else:
1205 onode, flags = r[7:9]
1228 onode, flags = r[7:9]
1206 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1229 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1207 % (f, rtype, state, _hashornull(hash)))
1230 % (f, rtype, state, _hashornull(hash)))
1208 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1231 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1209 ui.write((' ancestor path: %s (node %s)\n')
1232 ui.write((' ancestor path: %s (node %s)\n')
1210 % (afile, _hashornull(anode)))
1233 % (afile, _hashornull(anode)))
1211 ui.write((' other path: %s (node %s)\n')
1234 ui.write((' other path: %s (node %s)\n')
1212 % (ofile, _hashornull(onode)))
1235 % (ofile, _hashornull(onode)))
1213 elif rtype == 'f':
1236 elif rtype == 'f':
1214 filename, rawextras = record.split('\0', 1)
1237 filename, rawextras = record.split('\0', 1)
1215 extras = rawextras.split('\0')
1238 extras = rawextras.split('\0')
1216 i = 0
1239 i = 0
1217 extrastrings = []
1240 extrastrings = []
1218 while i < len(extras):
1241 while i < len(extras):
1219 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1242 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1220 i += 2
1243 i += 2
1221
1244
1222 ui.write(('file extras: %s (%s)\n')
1245 ui.write(('file extras: %s (%s)\n')
1223 % (filename, ', '.join(extrastrings)))
1246 % (filename, ', '.join(extrastrings)))
1224 elif rtype == 'l':
1247 elif rtype == 'l':
1225 labels = record.split('\0', 2)
1248 labels = record.split('\0', 2)
1226 labels = [l for l in labels if len(l) > 0]
1249 labels = [l for l in labels if len(l) > 0]
1227 ui.write(('labels:\n'))
1250 ui.write(('labels:\n'))
1228 ui.write((' local: %s\n' % labels[0]))
1251 ui.write((' local: %s\n' % labels[0]))
1229 ui.write((' other: %s\n' % labels[1]))
1252 ui.write((' other: %s\n' % labels[1]))
1230 if len(labels) > 2:
1253 if len(labels) > 2:
1231 ui.write((' base: %s\n' % labels[2]))
1254 ui.write((' base: %s\n' % labels[2]))
1232 else:
1255 else:
1233 ui.write(('unrecognized entry: %s\t%s\n')
1256 ui.write(('unrecognized entry: %s\t%s\n')
1234 % (rtype, record.replace('\0', '\t')))
1257 % (rtype, record.replace('\0', '\t')))
1235
1258
1236 # Avoid mergestate.read() since it may raise an exception for unsupported
1259 # Avoid mergestate.read() since it may raise an exception for unsupported
1237 # merge state records. We shouldn't be doing this, but this is OK since this
1260 # merge state records. We shouldn't be doing this, but this is OK since this
1238 # command is pretty low-level.
1261 # command is pretty low-level.
1239 ms = mergemod.mergestate(repo)
1262 ms = mergemod.mergestate(repo)
1240
1263
1241 # sort so that reasonable information is on top
1264 # sort so that reasonable information is on top
1242 v1records = ms._readrecordsv1()
1265 v1records = ms._readrecordsv1()
1243 v2records = ms._readrecordsv2()
1266 v2records = ms._readrecordsv2()
1244 order = 'LOml'
1267 order = 'LOml'
1245 def key(r):
1268 def key(r):
1246 idx = order.find(r[0])
1269 idx = order.find(r[0])
1247 if idx == -1:
1270 if idx == -1:
1248 return (1, r[1])
1271 return (1, r[1])
1249 else:
1272 else:
1250 return (0, idx)
1273 return (0, idx)
1251 v1records.sort(key=key)
1274 v1records.sort(key=key)
1252 v2records.sort(key=key)
1275 v2records.sort(key=key)
1253
1276
1254 if not v1records and not v2records:
1277 if not v1records and not v2records:
1255 ui.write(('no merge state found\n'))
1278 ui.write(('no merge state found\n'))
1256 elif not v2records:
1279 elif not v2records:
1257 ui.note(('no version 2 merge state\n'))
1280 ui.note(('no version 2 merge state\n'))
1258 printrecords(1)
1281 printrecords(1)
1259 elif ms._v1v2match(v1records, v2records):
1282 elif ms._v1v2match(v1records, v2records):
1260 ui.note(('v1 and v2 states match: using v2\n'))
1283 ui.note(('v1 and v2 states match: using v2\n'))
1261 printrecords(2)
1284 printrecords(2)
1262 else:
1285 else:
1263 ui.note(('v1 and v2 states mismatch: using v1\n'))
1286 ui.note(('v1 and v2 states mismatch: using v1\n'))
1264 printrecords(1)
1287 printrecords(1)
1265 if ui.verbose:
1288 if ui.verbose:
1266 printrecords(2)
1289 printrecords(2)
1267
1290
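# Example of the kind of output printrecords() emits for a single unresolved
# file in a v2 merge state; all hashes and paths below are placeholders:
#
#   * version 2 records
#   local: 5c095ad7e90f871700f02dd1fa5012cb4498a2d4
#   other: 6625a516847449b6f0fa3737b9ba56e9a0f64be5
#   file: foo.txt (record type "F", state "u", hash 60b27f004e454aca...)
#     local path: foo.txt (flags "")
#     ancestor path: foo.txt (node ad8b53c29d4b...)
#     other path: foo.txt (node 4d67f2a14a42...)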
1268 @command('debugnamecomplete', [], _('NAME...'))
1291 @command('debugnamecomplete', [], _('NAME...'))
1269 def debugnamecomplete(ui, repo, *args):
1292 def debugnamecomplete(ui, repo, *args):
1270 '''complete "names" - tags, open branch names, bookmark names'''
1293 '''complete "names" - tags, open branch names, bookmark names'''
1271
1294
1272 names = set()
1295 names = set()
1273 # since we previously only listed open branches, we will handle that
1296 # since we previously only listed open branches, we will handle that
1274 # specially (after this for loop)
1297 # specially (after this for loop)
1275 for name, ns in repo.names.iteritems():
1298 for name, ns in repo.names.iteritems():
1276 if name != 'branches':
1299 if name != 'branches':
1277 names.update(ns.listnames(repo))
1300 names.update(ns.listnames(repo))
1278 names.update(tag for (tag, heads, tip, closed)
1301 names.update(tag for (tag, heads, tip, closed)
1279 in repo.branchmap().iterbranches() if not closed)
1302 in repo.branchmap().iterbranches() if not closed)
1280 completions = set()
1303 completions = set()
1281 if not args:
1304 if not args:
1282 args = ['']
1305 args = ['']
1283 for a in args:
1306 for a in args:
1284 completions.update(n for n in names if n.startswith(a))
1307 completions.update(n for n in names if n.startswith(a))
1285 ui.write('\n'.join(sorted(completions)))
1308 ui.write('\n'.join(sorted(completions)))
1286 ui.write('\n')
1309 ui.write('\n')
1287
1310
1288 @command('debugobsolete',
1311 @command('debugobsolete',
1289 [('', 'flags', 0, _('markers flag')),
1312 [('', 'flags', 0, _('markers flag')),
1290 ('', 'record-parents', False,
1313 ('', 'record-parents', False,
1291 _('record parent information for the precursor')),
1314 _('record parent information for the precursor')),
1292 ('r', 'rev', [], _('display markers relevant to REV')),
1315 ('r', 'rev', [], _('display markers relevant to REV')),
1293 ('', 'index', False, _('display index of the marker')),
1316 ('', 'index', False, _('display index of the marker')),
1294 ('', 'delete', [], _('delete markers specified by indices')),
1317 ('', 'delete', [], _('delete markers specified by indices')),
1295 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1318 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1296 _('[OBSOLETED [REPLACEMENT ...]]'))
1319 _('[OBSOLETED [REPLACEMENT ...]]'))
1297 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1320 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1298 """create arbitrary obsolete marker
1321 """create arbitrary obsolete marker
1299
1322
1300 With no arguments, displays the list of obsolescence markers."""
1323 With no arguments, displays the list of obsolescence markers."""
1301
1324
1302 def parsenodeid(s):
1325 def parsenodeid(s):
1303 try:
1326 try:
1304 # We do not use revsingle/revrange functions here to accept
1327 # We do not use revsingle/revrange functions here to accept
1305 # arbitrary node identifiers, possibly not present in the
1328 # arbitrary node identifiers, possibly not present in the
1306 # local repository.
1329 # local repository.
1307 n = bin(s)
1330 n = bin(s)
1308 if len(n) != len(nullid):
1331 if len(n) != len(nullid):
1309 raise TypeError()
1332 raise TypeError()
1310 return n
1333 return n
1311 except TypeError:
1334 except TypeError:
1312 raise error.Abort('changeset references must be full hexadecimal '
1335 raise error.Abort('changeset references must be full hexadecimal '
1313 'node identifiers')
1336 'node identifiers')
1314
1337
1315 if opts.get('delete'):
1338 if opts.get('delete'):
1316 indices = []
1339 indices = []
1317 for v in opts.get('delete'):
1340 for v in opts.get('delete'):
1318 try:
1341 try:
1319 indices.append(int(v))
1342 indices.append(int(v))
1320 except ValueError:
1343 except ValueError:
1321 raise error.Abort(_('invalid index value: %r') % v,
1344 raise error.Abort(_('invalid index value: %r') % v,
1322 hint=_('use integers for indices'))
1345 hint=_('use integers for indices'))
1323
1346
1324 if repo.currenttransaction():
1347 if repo.currenttransaction():
1325 raise error.Abort(_('cannot delete obsmarkers in the middle '
1348 raise error.Abort(_('cannot delete obsmarkers in the middle '
1326 'of a transaction.'))
1349 'of a transaction.'))
1327
1350
1328 with repo.lock():
1351 with repo.lock():
1329 n = repair.deleteobsmarkers(repo.obsstore, indices)
1352 n = repair.deleteobsmarkers(repo.obsstore, indices)
1330 ui.write(_('deleted %i obsolescence markers\n') % n)
1353 ui.write(_('deleted %i obsolescence markers\n') % n)
1331
1354
1332 return
1355 return
1333
1356
1334 if precursor is not None:
1357 if precursor is not None:
1335 if opts['rev']:
1358 if opts['rev']:
1336 raise error.Abort('cannot select revision when creating marker')
1359 raise error.Abort('cannot select revision when creating marker')
1337 metadata = {}
1360 metadata = {}
1338 metadata['user'] = opts['user'] or ui.username()
1361 metadata['user'] = opts['user'] or ui.username()
1339 succs = tuple(parsenodeid(succ) for succ in successors)
1362 succs = tuple(parsenodeid(succ) for succ in successors)
1340 l = repo.lock()
1363 l = repo.lock()
1341 try:
1364 try:
1342 tr = repo.transaction('debugobsolete')
1365 tr = repo.transaction('debugobsolete')
1343 try:
1366 try:
1344 date = opts.get('date')
1367 date = opts.get('date')
1345 if date:
1368 if date:
1346 date = util.parsedate(date)
1369 date = util.parsedate(date)
1347 else:
1370 else:
1348 date = None
1371 date = None
1349 prec = parsenodeid(precursor)
1372 prec = parsenodeid(precursor)
1350 parents = None
1373 parents = None
1351 if opts['record_parents']:
1374 if opts['record_parents']:
1352 if prec not in repo.unfiltered():
1375 if prec not in repo.unfiltered():
1353 raise error.Abort('cannot use --record-parents on '
1376 raise error.Abort('cannot use --record-parents on '
1354 'unknown changesets')
1377 'unknown changesets')
1355 parents = repo.unfiltered()[prec].parents()
1378 parents = repo.unfiltered()[prec].parents()
1356 parents = tuple(p.node() for p in parents)
1379 parents = tuple(p.node() for p in parents)
1357 repo.obsstore.create(tr, prec, succs, opts['flags'],
1380 repo.obsstore.create(tr, prec, succs, opts['flags'],
1358 parents=parents, date=date,
1381 parents=parents, date=date,
1359 metadata=metadata, ui=ui)
1382 metadata=metadata, ui=ui)
1360 tr.close()
1383 tr.close()
1361 except ValueError as exc:
1384 except ValueError as exc:
1362 raise error.Abort(_('bad obsmarker input: %s') % exc)
1385 raise error.Abort(_('bad obsmarker input: %s') % exc)
1363 finally:
1386 finally:
1364 tr.release()
1387 tr.release()
1365 finally:
1388 finally:
1366 l.release()
1389 l.release()
1367 else:
1390 else:
1368 if opts['rev']:
1391 if opts['rev']:
1369 revs = scmutil.revrange(repo, opts['rev'])
1392 revs = scmutil.revrange(repo, opts['rev'])
1370 nodes = [repo[r].node() for r in revs]
1393 nodes = [repo[r].node() for r in revs]
1371 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1394 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1372 markers.sort(key=lambda x: x._data)
1395 markers.sort(key=lambda x: x._data)
1373 else:
1396 else:
1374 markers = obsolete.getmarkers(repo)
1397 markers = obsolete.getmarkers(repo)
1375
1398
1376 markerstoiter = markers
1399 markerstoiter = markers
1377 isrelevant = lambda m: True
1400 isrelevant = lambda m: True
1378 if opts.get('rev') and opts.get('index'):
1401 if opts.get('rev') and opts.get('index'):
1379 markerstoiter = obsolete.getmarkers(repo)
1402 markerstoiter = obsolete.getmarkers(repo)
1380 markerset = set(markers)
1403 markerset = set(markers)
1381 isrelevant = lambda m: m in markerset
1404 isrelevant = lambda m: m in markerset
1382
1405
1383 fm = ui.formatter('debugobsolete', opts)
1406 fm = ui.formatter('debugobsolete', opts)
1384 for i, m in enumerate(markerstoiter):
1407 for i, m in enumerate(markerstoiter):
1385 if not isrelevant(m):
1408 if not isrelevant(m):
1386 # marker can be irrelevant when we're iterating over a set
1409 # marker can be irrelevant when we're iterating over a set
1387 # of markers (markerstoiter) which is bigger than the set
1410 # of markers (markerstoiter) which is bigger than the set
1388 # of markers we want to display (markers)
1411 # of markers we want to display (markers)
1389 # this can happen if both --index and --rev options are
1412 # this can happen if both --index and --rev options are
1390 # provided and thus we need to iterate over all of the markers
1413 # provided and thus we need to iterate over all of the markers
1391 # to get the correct indices, but only display the ones that
1414 # to get the correct indices, but only display the ones that
1392 # are relevant to --rev value
1415 # are relevant to --rev value
1393 continue
1416 continue
1394 fm.startitem()
1417 fm.startitem()
1395 ind = i if opts.get('index') else None
1418 ind = i if opts.get('index') else None
1396 cmdutil.showmarker(fm, m, index=ind)
1419 cmdutil.showmarker(fm, m, index=ind)
1397 fm.end()
1420 fm.end()
1398
1421
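# Illustrative invocations; the 40-character hashes are placeholders, and as
# parsenodeid() above enforces, only full hexadecimal node ids are accepted:
#
#   # mark one changeset as superseded by another
#   $ hg debugobsolete 5c095ad7e90f871700f02dd1fa5012cb4498a2d4 \
#                      6625a516847449b6f0fa3737b9ba56e9a0f64be5
#
#   # list every marker with its index, then delete the first one
#   $ hg debugobsolete --index
#   $ hg debugobsolete --delete 0
#   deleted 1 obsolescence markers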
1399 @command('debugpathcomplete',
1422 @command('debugpathcomplete',
1400 [('f', 'full', None, _('complete an entire path')),
1423 [('f', 'full', None, _('complete an entire path')),
1401 ('n', 'normal', None, _('show only normal files')),
1424 ('n', 'normal', None, _('show only normal files')),
1402 ('a', 'added', None, _('show only added files')),
1425 ('a', 'added', None, _('show only added files')),
1403 ('r', 'removed', None, _('show only removed files'))],
1426 ('r', 'removed', None, _('show only removed files'))],
1404 _('FILESPEC...'))
1427 _('FILESPEC...'))
1405 def debugpathcomplete(ui, repo, *specs, **opts):
1428 def debugpathcomplete(ui, repo, *specs, **opts):
1406 '''complete part or all of a tracked path
1429 '''complete part or all of a tracked path
1407
1430
1408 This command supports shells that offer path name completion. It
1431 This command supports shells that offer path name completion. It
1409 currently completes only files already known to the dirstate.
1432 currently completes only files already known to the dirstate.
1410
1433
1411 Completion extends only to the next path segment unless
1434 Completion extends only to the next path segment unless
1412 --full is specified, in which case entire paths are used.'''
1435 --full is specified, in which case entire paths are used.'''
1413
1436
1414 def complete(path, acceptable):
1437 def complete(path, acceptable):
1415 dirstate = repo.dirstate
1438 dirstate = repo.dirstate
1416 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1439 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1417 rootdir = repo.root + pycompat.ossep
1440 rootdir = repo.root + pycompat.ossep
1418 if spec != repo.root and not spec.startswith(rootdir):
1441 if spec != repo.root and not spec.startswith(rootdir):
1419 return [], []
1442 return [], []
1420 if os.path.isdir(spec):
1443 if os.path.isdir(spec):
1421 spec += '/'
1444 spec += '/'
1422 spec = spec[len(rootdir):]
1445 spec = spec[len(rootdir):]
1423 fixpaths = pycompat.ossep != '/'
1446 fixpaths = pycompat.ossep != '/'
1424 if fixpaths:
1447 if fixpaths:
1425 spec = spec.replace(pycompat.ossep, '/')
1448 spec = spec.replace(pycompat.ossep, '/')
1426 speclen = len(spec)
1449 speclen = len(spec)
1427 fullpaths = opts['full']
1450 fullpaths = opts['full']
1428 files, dirs = set(), set()
1451 files, dirs = set(), set()
1429 adddir, addfile = dirs.add, files.add
1452 adddir, addfile = dirs.add, files.add
1430 for f, st in dirstate.iteritems():
1453 for f, st in dirstate.iteritems():
1431 if f.startswith(spec) and st[0] in acceptable:
1454 if f.startswith(spec) and st[0] in acceptable:
1432 if fixpaths:
1455 if fixpaths:
1433 f = f.replace('/', pycompat.ossep)
1456 f = f.replace('/', pycompat.ossep)
1434 if fullpaths:
1457 if fullpaths:
1435 addfile(f)
1458 addfile(f)
1436 continue
1459 continue
1437 s = f.find(pycompat.ossep, speclen)
1460 s = f.find(pycompat.ossep, speclen)
1438 if s >= 0:
1461 if s >= 0:
1439 adddir(f[:s])
1462 adddir(f[:s])
1440 else:
1463 else:
1441 addfile(f)
1464 addfile(f)
1442 return files, dirs
1465 return files, dirs
1443
1466
1444 acceptable = ''
1467 acceptable = ''
1445 if opts['normal']:
1468 if opts['normal']:
1446 acceptable += 'nm'
1469 acceptable += 'nm'
1447 if opts['added']:
1470 if opts['added']:
1448 acceptable += 'a'
1471 acceptable += 'a'
1449 if opts['removed']:
1472 if opts['removed']:
1450 acceptable += 'r'
1473 acceptable += 'r'
1451 cwd = repo.getcwd()
1474 cwd = repo.getcwd()
1452 if not specs:
1475 if not specs:
1453 specs = ['.']
1476 specs = ['.']
1454
1477
1455 files, dirs = set(), set()
1478 files, dirs = set(), set()
1456 for spec in specs:
1479 for spec in specs:
1457 f, d = complete(spec, acceptable or 'nmar')
1480 f, d = complete(spec, acceptable or 'nmar')
1458 files.update(f)
1481 files.update(f)
1459 dirs.update(d)
1482 dirs.update(d)
1460 files.update(dirs)
1483 files.update(dirs)
1461 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1484 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1462 ui.write('\n')
1485 ui.write('\n')
1463
1486
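# Sketch of shell-completion style usage, assuming a hypothetical repository
# that tracks tests/test-a.t and tests/test-b.t:
#
#   $ hg debugpathcomplete te
#   tests
#   $ hg debugpathcomplete --full tests/
#   tests/test-a.t
#   tests/test-b.t
#
# Without --full, completion stops at the next path separator, which is what
# the f.find(pycompat.ossep, speclen) branch above implements.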
1464 @command('debugpickmergetool',
1487 @command('debugpickmergetool',
1465 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1488 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1466 ('', 'changedelete', None, _('emulate merging change and delete')),
1489 ('', 'changedelete', None, _('emulate merging change and delete')),
1467 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1490 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1468 _('[PATTERN]...'),
1491 _('[PATTERN]...'),
1469 inferrepo=True)
1492 inferrepo=True)
1470 def debugpickmergetool(ui, repo, *pats, **opts):
1493 def debugpickmergetool(ui, repo, *pats, **opts):
1471 """examine which merge tool is chosen for specified file
1494 """examine which merge tool is chosen for specified file
1472
1495
1473 As described in :hg:`help merge-tools`, Mercurial examines
1496 As described in :hg:`help merge-tools`, Mercurial examines
1474 configurations below in this order to decide which merge tool is
1497 configurations below in this order to decide which merge tool is
1475 chosen for the specified file.
1498 chosen for the specified file.
1476
1499
1477 1. ``--tool`` option
1500 1. ``--tool`` option
1478 2. ``HGMERGE`` environment variable
1501 2. ``HGMERGE`` environment variable
1479 3. configurations in ``merge-patterns`` section
1502 3. configurations in ``merge-patterns`` section
1480 4. configuration of ``ui.merge``
1503 4. configuration of ``ui.merge``
1481 5. configurations in ``merge-tools`` section
1504 5. configurations in ``merge-tools`` section
1482 6. ``hgmerge`` tool (for historical reasons only)
1505 6. ``hgmerge`` tool (for historical reasons only)
1483 7. default tool for fallback (``:merge`` or ``:prompt``)
1506 7. default tool for fallback (``:merge`` or ``:prompt``)
1484
1507
1485 This command writes out the examination result in the style below::
1508 This command writes out the examination result in the style below::
1486
1509
1487 FILE = MERGETOOL
1510 FILE = MERGETOOL
1488
1511
1489 By default, all files known in the first parent context of the
1512 By default, all files known in the first parent context of the
1490 working directory are examined. Use file patterns and/or -I/-X
1513 working directory are examined. Use file patterns and/or -I/-X
1491 options to limit target files. -r/--rev is also useful to examine
1514 options to limit target files. -r/--rev is also useful to examine
1492 files in another context without actually updating to it.
1515 files in another context without actually updating to it.
1493
1516
1494 With --debug, this command shows warning messages while matching
1517 With --debug, this command shows warning messages while matching
1495 against ``merge-patterns`` and so on, too. It is recommended to
1518 against ``merge-patterns`` and so on, too. It is recommended to
1496 use this option with explicit file patterns and/or -I/-X options,
1519 use this option with explicit file patterns and/or -I/-X options,
1497 because this option increases the amount of output per file according
1520 because this option increases the amount of output per file according
1498 to configurations in hgrc.
1521 to configurations in hgrc.
1499
1522
1500 With -v/--verbose, this command first shows the configurations below
1523 With -v/--verbose, this command first shows the configurations below
1501 (only those that are actually set).
1524 (only those that are actually set).
1502
1525
1503 - ``--tool`` option
1526 - ``--tool`` option
1504 - ``HGMERGE`` environment variable
1527 - ``HGMERGE`` environment variable
1505 - configuration of ``ui.merge``
1528 - configuration of ``ui.merge``
1506
1529
1507 If merge tool is chosen before matching against
1530 If merge tool is chosen before matching against
1508 ``merge-patterns``, this command can't show any helpful
1531 ``merge-patterns``, this command can't show any helpful
1509 information, even with --debug. In such a case, the information
1532 information, even with --debug. In such a case, the information
1510 above helps explain why a merge tool is chosen.
1533 above helps explain why a merge tool is chosen.
1511 """
1534 """
1512 overrides = {}
1535 overrides = {}
1513 if opts['tool']:
1536 if opts['tool']:
1514 overrides[('ui', 'forcemerge')] = opts['tool']
1537 overrides[('ui', 'forcemerge')] = opts['tool']
1515 ui.note(('with --tool %r\n') % (opts['tool']))
1538 ui.note(('with --tool %r\n') % (opts['tool']))
1516
1539
1517 with ui.configoverride(overrides, 'debugmergepatterns'):
1540 with ui.configoverride(overrides, 'debugmergepatterns'):
1518 hgmerge = encoding.environ.get("HGMERGE")
1541 hgmerge = encoding.environ.get("HGMERGE")
1519 if hgmerge is not None:
1542 if hgmerge is not None:
1520 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1543 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1521 uimerge = ui.config("ui", "merge")
1544 uimerge = ui.config("ui", "merge")
1522 if uimerge:
1545 if uimerge:
1523 ui.note(('with ui.merge=%r\n') % (uimerge))
1546 ui.note(('with ui.merge=%r\n') % (uimerge))
1524
1547
1525 ctx = scmutil.revsingle(repo, opts.get('rev'))
1548 ctx = scmutil.revsingle(repo, opts.get('rev'))
1526 m = scmutil.match(ctx, pats, opts)
1549 m = scmutil.match(ctx, pats, opts)
1527 changedelete = opts['changedelete']
1550 changedelete = opts['changedelete']
1528 for path in ctx.walk(m):
1551 for path in ctx.walk(m):
1529 fctx = ctx[path]
1552 fctx = ctx[path]
1530 try:
1553 try:
1531 if not ui.debugflag:
1554 if not ui.debugflag:
1532 ui.pushbuffer(error=True)
1555 ui.pushbuffer(error=True)
1533 tool, toolpath = filemerge._picktool(repo, ui, path,
1556 tool, toolpath = filemerge._picktool(repo, ui, path,
1534 fctx.isbinary(),
1557 fctx.isbinary(),
1535 'l' in fctx.flags(),
1558 'l' in fctx.flags(),
1536 changedelete)
1559 changedelete)
1537 finally:
1560 finally:
1538 if not ui.debugflag:
1561 if not ui.debugflag:
1539 ui.popbuffer()
1562 ui.popbuffer()
1540 ui.write(('%s = %s\n') % (path, tool))
1563 ui.write(('%s = %s\n') % (path, tool))
1541
1564
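# Example of the "FILE = MERGETOOL" output described in the docstring,
# assuming a setup with no --tool, no HGMERGE, no merge-patterns/ui.merge
# configuration and no usable external tools, so the fallback from item 7 of
# the list above is picked (file names are placeholders):
#
#   $ hg debugpickmergetool
#   a.txt = :merge
#   image.png = :prompt
#
# A binary file typically ends up with :prompt rather than :merge, which is
# why fctx.isbinary() is passed to filemerge._picktool() above.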
1542 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1565 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1543 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1566 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1544 '''access the pushkey key/value protocol
1567 '''access the pushkey key/value protocol
1545
1568
1546 With two args, list the keys in the given namespace.
1569 With two args, list the keys in the given namespace.
1547
1570
1548 With five args, set a key to new if it currently is set to old.
1571 With five args, set a key to new if it currently is set to old.
1549 Reports success or failure.
1572 Reports success or failure.
1550 '''
1573 '''
1551
1574
1552 target = hg.peer(ui, {}, repopath)
1575 target = hg.peer(ui, {}, repopath)
1553 if keyinfo:
1576 if keyinfo:
1554 key, old, new = keyinfo
1577 key, old, new = keyinfo
1555 r = target.pushkey(namespace, key, old, new)
1578 r = target.pushkey(namespace, key, old, new)
1556 ui.status(str(r) + '\n')
1579 ui.status(str(r) + '\n')
1557 return not r
1580 return not r
1558 else:
1581 else:
1559 for k, v in sorted(target.listkeys(namespace).iteritems()):
1582 for k, v in sorted(target.listkeys(namespace).iteritems()):
1560 ui.write("%s\t%s\n" % (util.escapestr(k),
1583 ui.write("%s\t%s\n" % (util.escapestr(k),
1561 util.escapestr(v)))
1584 util.escapestr(v)))
1562
1585
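# Illustrative use of the pushkey protocol through this command, assuming the
# peer exposes the standard "bookmarks" namespace; names and hashes are
# placeholders:
#
#   # two args: list the keys in a namespace
#   $ hg debugpushkey ../peer-repo bookmarks
#   feature-x	5c095ad7e90f871700f02dd1fa5012cb4498a2d4
#
#   # five args: move a key from an old value to a new one
#   $ hg debugpushkey ../peer-repo bookmarks feature-x \
#         5c095ad7e90f871700f02dd1fa5012cb4498a2d4 \
#         6625a516847449b6f0fa3737b9ba56e9a0f64be5
#   True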
1563 @command('debugpvec', [], _('A B'))
1586 @command('debugpvec', [], _('A B'))
1564 def debugpvec(ui, repo, a, b=None):
1587 def debugpvec(ui, repo, a, b=None):
1565 ca = scmutil.revsingle(repo, a)
1588 ca = scmutil.revsingle(repo, a)
1566 cb = scmutil.revsingle(repo, b)
1589 cb = scmutil.revsingle(repo, b)
1567 pa = pvec.ctxpvec(ca)
1590 pa = pvec.ctxpvec(ca)
1568 pb = pvec.ctxpvec(cb)
1591 pb = pvec.ctxpvec(cb)
1569 if pa == pb:
1592 if pa == pb:
1570 rel = "="
1593 rel = "="
1571 elif pa > pb:
1594 elif pa > pb:
1572 rel = ">"
1595 rel = ">"
1573 elif pa < pb:
1596 elif pa < pb:
1574 rel = "<"
1597 rel = "<"
1575 elif pa | pb:
1598 elif pa | pb:
1576 rel = "|"
1599 rel = "|"
1577 ui.write(_("a: %s\n") % pa)
1600 ui.write(_("a: %s\n") % pa)
1578 ui.write(_("b: %s\n") % pb)
1601 ui.write(_("b: %s\n") % pb)
1579 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1602 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1580 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1603 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1581 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1604 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1582 pa.distance(pb), rel))
1605 pa.distance(pb), rel))
1583
1606
1584 @command('debugrebuilddirstate|debugrebuildstate',
1607 @command('debugrebuilddirstate|debugrebuildstate',
1585 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1608 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1586 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1609 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1587 'the working copy parent')),
1610 'the working copy parent')),
1588 ],
1611 ],
1589 _('[-r REV]'))
1612 _('[-r REV]'))
1590 def debugrebuilddirstate(ui, repo, rev, **opts):
1613 def debugrebuilddirstate(ui, repo, rev, **opts):
1591 """rebuild the dirstate as it would look like for the given revision
1614 """rebuild the dirstate as it would look like for the given revision
1592
1615
1593 If no revision is specified, the first current parent will be used.
1616 If no revision is specified, the first current parent will be used.
1594
1617
1595 The dirstate will be set to the files of the given revision.
1618 The dirstate will be set to the files of the given revision.
1596 The actual working directory content or existing dirstate
1619 The actual working directory content or existing dirstate
1597 information such as adds or removes is not considered.
1620 information such as adds or removes is not considered.
1598
1621
1599 ``minimal`` will only rebuild the dirstate status for files that claim to be
1622 ``minimal`` will only rebuild the dirstate status for files that claim to be
1600 tracked but are not in the parent manifest, or that exist in the parent
1623 tracked but are not in the parent manifest, or that exist in the parent
1601 manifest but are not in the dirstate. It will not change adds, removes, or
1624 manifest but are not in the dirstate. It will not change adds, removes, or
1602 modified files that are in the working copy parent.
1625 modified files that are in the working copy parent.
1603
1626
1604 One use of this command is to make the next :hg:`status` invocation
1627 One use of this command is to make the next :hg:`status` invocation
1605 check the actual file content.
1628 check the actual file content.
1606 """
1629 """
1607 ctx = scmutil.revsingle(repo, rev)
1630 ctx = scmutil.revsingle(repo, rev)
1608 with repo.wlock():
1631 with repo.wlock():
1609 dirstate = repo.dirstate
1632 dirstate = repo.dirstate
1610 changedfiles = None
1633 changedfiles = None
1611 # See command doc for what minimal does.
1634 # See command doc for what minimal does.
1612 if opts.get('minimal'):
1635 if opts.get('minimal'):
1613 manifestfiles = set(ctx.manifest().keys())
1636 manifestfiles = set(ctx.manifest().keys())
1614 dirstatefiles = set(dirstate)
1637 dirstatefiles = set(dirstate)
1615 manifestonly = manifestfiles - dirstatefiles
1638 manifestonly = manifestfiles - dirstatefiles
1616 dsonly = dirstatefiles - manifestfiles
1639 dsonly = dirstatefiles - manifestfiles
1617 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1640 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1618 changedfiles = manifestonly | dsnotadded
1641 changedfiles = manifestonly | dsnotadded
1619
1642
1620 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1643 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1621
1644
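# A minimal sketch of the --minimal bookkeeping above, with toy sets (file
# names are invented; the real sets come from ctx.manifest() and dirstate):
#
#   manifestfiles = {'a', 'b', 'c'}       # tracked in the target revision
#   dirstatefiles = {'a', 'b', 'd', 'e'}  # currently known to the dirstate
#   manifestonly  = {'c'}                 # manifestfiles - dirstatefiles
#   dsonly        = {'d', 'e'}            # dirstatefiles - manifestfiles
#   dsnotadded    = {'e'}                 # assuming only 'd' is marked 'a'
#   changedfiles  = {'c', 'e'}            # manifestonly | dsnotadded
#
# Only these files are passed to dirstate.rebuild(); everything else keeps
# its current dirstate entry, as the docstring promises.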
1622 @command('debugrebuildfncache', [], '')
1645 @command('debugrebuildfncache', [], '')
1623 def debugrebuildfncache(ui, repo):
1646 def debugrebuildfncache(ui, repo):
1624 """rebuild the fncache file"""
1647 """rebuild the fncache file"""
1625 repair.rebuildfncache(ui, repo)
1648 repair.rebuildfncache(ui, repo)
1626
1649
1627 @command('debugrename',
1650 @command('debugrename',
1628 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1651 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1629 _('[-r REV] FILE'))
1652 _('[-r REV] FILE'))
1630 def debugrename(ui, repo, file1, *pats, **opts):
1653 def debugrename(ui, repo, file1, *pats, **opts):
1631 """dump rename information"""
1654 """dump rename information"""
1632
1655
1633 ctx = scmutil.revsingle(repo, opts.get('rev'))
1656 ctx = scmutil.revsingle(repo, opts.get('rev'))
1634 m = scmutil.match(ctx, (file1,) + pats, opts)
1657 m = scmutil.match(ctx, (file1,) + pats, opts)
1635 for abs in ctx.walk(m):
1658 for abs in ctx.walk(m):
1636 fctx = ctx[abs]
1659 fctx = ctx[abs]
1637 o = fctx.filelog().renamed(fctx.filenode())
1660 o = fctx.filelog().renamed(fctx.filenode())
1638 rel = m.rel(abs)
1661 rel = m.rel(abs)
1639 if o:
1662 if o:
1640 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1663 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1641 else:
1664 else:
1642 ui.write(_("%s not renamed\n") % rel)
1665 ui.write(_("%s not renamed\n") % rel)
1643
1666
1644 @command('debugrevlog', cmdutil.debugrevlogopts +
1667 @command('debugrevlog', cmdutil.debugrevlogopts +
1645 [('d', 'dump', False, _('dump index data'))],
1668 [('d', 'dump', False, _('dump index data'))],
1646 _('-c|-m|FILE'),
1669 _('-c|-m|FILE'),
1647 optionalrepo=True)
1670 optionalrepo=True)
1648 def debugrevlog(ui, repo, file_=None, **opts):
1671 def debugrevlog(ui, repo, file_=None, **opts):
1649 """show data and statistics about a revlog"""
1672 """show data and statistics about a revlog"""
1650 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1673 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1651
1674
1652 if opts.get("dump"):
1675 if opts.get("dump"):
1653 numrevs = len(r)
1676 numrevs = len(r)
1654 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1677 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1655 " rawsize totalsize compression heads chainlen\n"))
1678 " rawsize totalsize compression heads chainlen\n"))
1656 ts = 0
1679 ts = 0
1657 heads = set()
1680 heads = set()
1658
1681
1659 for rev in xrange(numrevs):
1682 for rev in xrange(numrevs):
1660 dbase = r.deltaparent(rev)
1683 dbase = r.deltaparent(rev)
1661 if dbase == -1:
1684 if dbase == -1:
1662 dbase = rev
1685 dbase = rev
1663 cbase = r.chainbase(rev)
1686 cbase = r.chainbase(rev)
1664 clen = r.chainlen(rev)
1687 clen = r.chainlen(rev)
1665 p1, p2 = r.parentrevs(rev)
1688 p1, p2 = r.parentrevs(rev)
1666 rs = r.rawsize(rev)
1689 rs = r.rawsize(rev)
1667 ts = ts + rs
1690 ts = ts + rs
1668 heads -= set(r.parentrevs(rev))
1691 heads -= set(r.parentrevs(rev))
1669 heads.add(rev)
1692 heads.add(rev)
1670 try:
1693 try:
1671 compression = ts / r.end(rev)
1694 compression = ts / r.end(rev)
1672 except ZeroDivisionError:
1695 except ZeroDivisionError:
1673 compression = 0
1696 compression = 0
1674 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1697 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1675 "%11d %5d %8d\n" %
1698 "%11d %5d %8d\n" %
1676 (rev, p1, p2, r.start(rev), r.end(rev),
1699 (rev, p1, p2, r.start(rev), r.end(rev),
1677 r.start(dbase), r.start(cbase),
1700 r.start(dbase), r.start(cbase),
1678 r.start(p1), r.start(p2),
1701 r.start(p1), r.start(p2),
1679 rs, ts, compression, len(heads), clen))
1702 rs, ts, compression, len(heads), clen))
1680 return 0
1703 return 0
1681
1704
1682 v = r.version
1705 v = r.version
1683 format = v & 0xFFFF
1706 format = v & 0xFFFF
1684 flags = []
1707 flags = []
1685 gdelta = False
1708 gdelta = False
1686 if v & revlog.FLAG_INLINE_DATA:
1709 if v & revlog.FLAG_INLINE_DATA:
1687 flags.append('inline')
1710 flags.append('inline')
1688 if v & revlog.FLAG_GENERALDELTA:
1711 if v & revlog.FLAG_GENERALDELTA:
1689 gdelta = True
1712 gdelta = True
1690 flags.append('generaldelta')
1713 flags.append('generaldelta')
1691 if not flags:
1714 if not flags:
1692 flags = ['(none)']
1715 flags = ['(none)']
1693
1716
1694 nummerges = 0
1717 nummerges = 0
1695 numfull = 0
1718 numfull = 0
1696 numprev = 0
1719 numprev = 0
1697 nump1 = 0
1720 nump1 = 0
1698 nump2 = 0
1721 nump2 = 0
1699 numother = 0
1722 numother = 0
1700 nump1prev = 0
1723 nump1prev = 0
1701 nump2prev = 0
1724 nump2prev = 0
1702 chainlengths = []
1725 chainlengths = []
1703
1726
1704 datasize = [None, 0, 0]
1727 datasize = [None, 0, 0]
1705 fullsize = [None, 0, 0]
1728 fullsize = [None, 0, 0]
1706 deltasize = [None, 0, 0]
1729 deltasize = [None, 0, 0]
1707 chunktypecounts = {}
1730 chunktypecounts = {}
1708 chunktypesizes = {}
1731 chunktypesizes = {}
1709
1732
1710 def addsize(size, l):
1733 def addsize(size, l):
1711 if l[0] is None or size < l[0]:
1734 if l[0] is None or size < l[0]:
1712 l[0] = size
1735 l[0] = size
1713 if size > l[1]:
1736 if size > l[1]:
1714 l[1] = size
1737 l[1] = size
1715 l[2] += size
1738 l[2] += size
1716
1739
1717 numrevs = len(r)
1740 numrevs = len(r)
1718 for rev in xrange(numrevs):
1741 for rev in xrange(numrevs):
1719 p1, p2 = r.parentrevs(rev)
1742 p1, p2 = r.parentrevs(rev)
1720 delta = r.deltaparent(rev)
1743 delta = r.deltaparent(rev)
1721 if format > 0:
1744 if format > 0:
1722 addsize(r.rawsize(rev), datasize)
1745 addsize(r.rawsize(rev), datasize)
1723 if p2 != nullrev:
1746 if p2 != nullrev:
1724 nummerges += 1
1747 nummerges += 1
1725 size = r.length(rev)
1748 size = r.length(rev)
1726 if delta == nullrev:
1749 if delta == nullrev:
1727 chainlengths.append(0)
1750 chainlengths.append(0)
1728 numfull += 1
1751 numfull += 1
1729 addsize(size, fullsize)
1752 addsize(size, fullsize)
1730 else:
1753 else:
1731 chainlengths.append(chainlengths[delta] + 1)
1754 chainlengths.append(chainlengths[delta] + 1)
1732 addsize(size, deltasize)
1755 addsize(size, deltasize)
1733 if delta == rev - 1:
1756 if delta == rev - 1:
1734 numprev += 1
1757 numprev += 1
1735 if delta == p1:
1758 if delta == p1:
1736 nump1prev += 1
1759 nump1prev += 1
1737 elif delta == p2:
1760 elif delta == p2:
1738 nump2prev += 1
1761 nump2prev += 1
1739 elif delta == p1:
1762 elif delta == p1:
1740 nump1 += 1
1763 nump1 += 1
1741 elif delta == p2:
1764 elif delta == p2:
1742 nump2 += 1
1765 nump2 += 1
1743 elif delta != nullrev:
1766 elif delta != nullrev:
1744 numother += 1
1767 numother += 1
1745
1768
1746 # Obtain data on the raw chunks in the revlog.
1769 # Obtain data on the raw chunks in the revlog.
1747 segment = r._getsegmentforrevs(rev, rev)[1]
1770 segment = r._getsegmentforrevs(rev, rev)[1]
1748 if segment:
1771 if segment:
1749 chunktype = segment[0]
1772 chunktype = segment[0]
1750 else:
1773 else:
1751 chunktype = 'empty'
1774 chunktype = 'empty'
1752
1775
1753 if chunktype not in chunktypecounts:
1776 if chunktype not in chunktypecounts:
1754 chunktypecounts[chunktype] = 0
1777 chunktypecounts[chunktype] = 0
1755 chunktypesizes[chunktype] = 0
1778 chunktypesizes[chunktype] = 0
1756
1779
1757 chunktypecounts[chunktype] += 1
1780 chunktypecounts[chunktype] += 1
1758 chunktypesizes[chunktype] += size
1781 chunktypesizes[chunktype] += size
1759
1782
1760 # Adjust size min value for empty cases
1783 # Adjust size min value for empty cases
1761 for size in (datasize, fullsize, deltasize):
1784 for size in (datasize, fullsize, deltasize):
1762 if size[0] is None:
1785 if size[0] is None:
1763 size[0] = 0
1786 size[0] = 0
1764
1787
1765 numdeltas = numrevs - numfull
1788 numdeltas = numrevs - numfull
1766 numoprev = numprev - nump1prev - nump2prev
1789 numoprev = numprev - nump1prev - nump2prev
1767 totalrawsize = datasize[2]
1790 totalrawsize = datasize[2]
1768 datasize[2] /= numrevs
1791 datasize[2] /= numrevs
1769 fulltotal = fullsize[2]
1792 fulltotal = fullsize[2]
1770 fullsize[2] /= numfull
1793 fullsize[2] /= numfull
1771 deltatotal = deltasize[2]
1794 deltatotal = deltasize[2]
1772 if numrevs - numfull > 0:
1795 if numrevs - numfull > 0:
1773 deltasize[2] /= numrevs - numfull
1796 deltasize[2] /= numrevs - numfull
1774 totalsize = fulltotal + deltatotal
1797 totalsize = fulltotal + deltatotal
1775 avgchainlen = sum(chainlengths) / numrevs
1798 avgchainlen = sum(chainlengths) / numrevs
1776 maxchainlen = max(chainlengths)
1799 maxchainlen = max(chainlengths)
1777 compratio = 1
1800 compratio = 1
1778 if totalsize:
1801 if totalsize:
1779 compratio = totalrawsize / totalsize
1802 compratio = totalrawsize / totalsize
1780
1803
1781 basedfmtstr = '%%%dd\n'
1804 basedfmtstr = '%%%dd\n'
1782 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1805 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1783
1806
1784 def dfmtstr(max):
1807 def dfmtstr(max):
1785 return basedfmtstr % len(str(max))
1808 return basedfmtstr % len(str(max))
1786 def pcfmtstr(max, padding=0):
1809 def pcfmtstr(max, padding=0):
1787 return basepcfmtstr % (len(str(max)), ' ' * padding)
1810 return basepcfmtstr % (len(str(max)), ' ' * padding)
1788
1811
1789 def pcfmt(value, total):
1812 def pcfmt(value, total):
1790 if total:
1813 if total:
1791 return (value, 100 * float(value) / total)
1814 return (value, 100 * float(value) / total)
1792 else:
1815 else:
1793 return value, 100.0
1816 return value, 100.0
1794
1817
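# Editorial sketch, not part of this patch: how the width and percentage
# helpers above combine. With a total of 1234 bytes, pcfmtstr(1234) yields
# '%4d (%5.2f%%)\n', so a 600-byte component prints as ' 600 (48.62%)'.
fmt = '%%%dd %s(%%5.2f%%%%)\n' % (len(str(1234)), '')
assert fmt % (600, 100 * 600.0 / 1234) == ' 600 (48.62%)\n'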
1795 ui.write(('format : %d\n') % format)
1818 ui.write(('format : %d\n') % format)
1796 ui.write(('flags : %s\n') % ', '.join(flags))
1819 ui.write(('flags : %s\n') % ', '.join(flags))
1797
1820
1798 ui.write('\n')
1821 ui.write('\n')
1799 fmt = pcfmtstr(totalsize)
1822 fmt = pcfmtstr(totalsize)
1800 fmt2 = dfmtstr(totalsize)
1823 fmt2 = dfmtstr(totalsize)
1801 ui.write(('revisions : ') + fmt2 % numrevs)
1824 ui.write(('revisions : ') + fmt2 % numrevs)
1802 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1825 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1803 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1826 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1804 ui.write(('revisions : ') + fmt2 % numrevs)
1827 ui.write(('revisions : ') + fmt2 % numrevs)
1805 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1828 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1806 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1829 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1807 ui.write(('revision size : ') + fmt2 % totalsize)
1830 ui.write(('revision size : ') + fmt2 % totalsize)
1808 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1831 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1809 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1832 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1810
1833
1811 def fmtchunktype(chunktype):
1834 def fmtchunktype(chunktype):
1812 if chunktype == 'empty':
1835 if chunktype == 'empty':
1813 return ' %s : ' % chunktype
1836 return ' %s : ' % chunktype
1814 elif chunktype in string.ascii_letters:
1837 elif chunktype in string.ascii_letters:
1815 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1838 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1816 else:
1839 else:
1817 return ' 0x%s : ' % hex(chunktype)
1840 return ' 0x%s : ' % hex(chunktype)
1818
1841
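# Editorial sketch, not part of this patch: fmtchunktype() above labels the
# one-byte chunk type; hex() here comes from mercurial.node and behaves like
# binascii.hexlify (Python 2 strings assumed, as in the rest of this file).
# A letter such as 'u' (typically an uncompressed chunk) is shown as hex plus
# the character, any other byte as hex only.
import binascii
assert binascii.hexlify('u') == '75'        # rendered as "0x75 (u)"
assert binascii.hexlify('\x00') == '00'     # rendered as "0x00" only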
1819 ui.write('\n')
1842 ui.write('\n')
1820 ui.write(('chunks : ') + fmt2 % numrevs)
1843 ui.write(('chunks : ') + fmt2 % numrevs)
1821 for chunktype in sorted(chunktypecounts):
1844 for chunktype in sorted(chunktypecounts):
1822 ui.write(fmtchunktype(chunktype))
1845 ui.write(fmtchunktype(chunktype))
1823 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1846 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1824 ui.write(('chunks size : ') + fmt2 % totalsize)
1847 ui.write(('chunks size : ') + fmt2 % totalsize)
1825 for chunktype in sorted(chunktypecounts):
1848 for chunktype in sorted(chunktypecounts):
1826 ui.write(fmtchunktype(chunktype))
1849 ui.write(fmtchunktype(chunktype))
1827 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1850 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1828
1851
1829 ui.write('\n')
1852 ui.write('\n')
1830 fmt = dfmtstr(max(avgchainlen, compratio))
1853 fmt = dfmtstr(max(avgchainlen, compratio))
1831 ui.write(('avg chain length : ') + fmt % avgchainlen)
1854 ui.write(('avg chain length : ') + fmt % avgchainlen)
1832 ui.write(('max chain length : ') + fmt % maxchainlen)
1855 ui.write(('max chain length : ') + fmt % maxchainlen)
1833 ui.write(('compression ratio : ') + fmt % compratio)
1856 ui.write(('compression ratio : ') + fmt % compratio)
1834
1857
1835 if format > 0:
1858 if format > 0:
1836 ui.write('\n')
1859 ui.write('\n')
1837 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1860 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1838 % tuple(datasize))
1861 % tuple(datasize))
1839 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1862 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1840 % tuple(fullsize))
1863 % tuple(fullsize))
1841 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1864 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1842 % tuple(deltasize))
1865 % tuple(deltasize))
1843
1866
1844 if numdeltas > 0:
1867 if numdeltas > 0:
1845 ui.write('\n')
1868 ui.write('\n')
1846 fmt = pcfmtstr(numdeltas)
1869 fmt = pcfmtstr(numdeltas)
1847 fmt2 = pcfmtstr(numdeltas, 4)
1870 fmt2 = pcfmtstr(numdeltas, 4)
1848 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1871 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1849 if numprev > 0:
1872 if numprev > 0:
1850 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1873 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1851 numprev))
1874 numprev))
1852 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1875 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1853 numprev))
1876 numprev))
1854 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1877 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1855 numprev))
1878 numprev))
1856 if gdelta:
1879 if gdelta:
1857 ui.write(('deltas against p1 : ')
1880 ui.write(('deltas against p1 : ')
1858 + fmt % pcfmt(nump1, numdeltas))
1881 + fmt % pcfmt(nump1, numdeltas))
1859 ui.write(('deltas against p2 : ')
1882 ui.write(('deltas against p2 : ')
1860 + fmt % pcfmt(nump2, numdeltas))
1883 + fmt % pcfmt(nump2, numdeltas))
1861 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1884 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1862 numdeltas))
1885 numdeltas))
1863
1886
1864 @command('debugrevspec',
1887 @command('debugrevspec',
1865 [('', 'optimize', None,
1888 [('', 'optimize', None,
1866 _('print parsed tree after optimizing (DEPRECATED)')),
1889 _('print parsed tree after optimizing (DEPRECATED)')),
1867 ('p', 'show-stage', [],
1890 ('p', 'show-stage', [],
1868 _('print parsed tree at the given stage'), _('NAME')),
1891 _('print parsed tree at the given stage'), _('NAME')),
1869 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1892 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1870 ('', 'verify-optimized', False, _('verify optimized result')),
1893 ('', 'verify-optimized', False, _('verify optimized result')),
1871 ],
1894 ],
1872 ('REVSPEC'))
1895 ('REVSPEC'))
1873 def debugrevspec(ui, repo, expr, **opts):
1896 def debugrevspec(ui, repo, expr, **opts):
1874 """parse and apply a revision specification
1897 """parse and apply a revision specification
1875
1898
1876 Use -p/--show-stage option to print the parsed tree at the given stages.
1899 Use -p/--show-stage option to print the parsed tree at the given stages.
1877 Use -p all to print tree at every stage.
1900 Use -p all to print tree at every stage.
1878
1901
1879 Use --verify-optimized to compare the optimized result with the unoptimized
1902 Use --verify-optimized to compare the optimized result with the unoptimized
1880 one. Returns 1 if the optimized result differs.
1903 one. Returns 1 if the optimized result differs.
1881 """
1904 """
1882 stages = [
1905 stages = [
1883 ('parsed', lambda tree: tree),
1906 ('parsed', lambda tree: tree),
1884 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1907 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1885 ('concatenated', revsetlang.foldconcat),
1908 ('concatenated', revsetlang.foldconcat),
1886 ('analyzed', revsetlang.analyze),
1909 ('analyzed', revsetlang.analyze),
1887 ('optimized', revsetlang.optimize),
1910 ('optimized', revsetlang.optimize),
1888 ]
1911 ]
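# Editorial sketch, not part of this patch: the loop further down threads the
# tree through each stage in order (treebystage[n] = tree = f(tree)), so every
# stage receives the previous stage's output. A toy equivalent with stand-in
# transforms:
toy_stages = [('parsed', lambda t: t),
              ('upper', lambda t: t.upper()),
              ('reversed', lambda t: t[::-1])]
tree = 'head()'
for name, f in toy_stages:
    tree = f(tree)
assert tree == ')(DAEH'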
1889 if opts['no_optimized']:
1912 if opts['no_optimized']:
1890 stages = stages[:-1]
1913 stages = stages[:-1]
1891 if opts['verify_optimized'] and opts['no_optimized']:
1914 if opts['verify_optimized'] and opts['no_optimized']:
1892 raise error.Abort(_('cannot use --verify-optimized with '
1915 raise error.Abort(_('cannot use --verify-optimized with '
1893 '--no-optimized'))
1916 '--no-optimized'))
1894 stagenames = set(n for n, f in stages)
1917 stagenames = set(n for n, f in stages)
1895
1918
1896 showalways = set()
1919 showalways = set()
1897 showchanged = set()
1920 showchanged = set()
1898 if ui.verbose and not opts['show_stage']:
1921 if ui.verbose and not opts['show_stage']:
1899 # show parsed tree by --verbose (deprecated)
1922 # show parsed tree by --verbose (deprecated)
1900 showalways.add('parsed')
1923 showalways.add('parsed')
1901 showchanged.update(['expanded', 'concatenated'])
1924 showchanged.update(['expanded', 'concatenated'])
1902 if opts['optimize']:
1925 if opts['optimize']:
1903 showalways.add('optimized')
1926 showalways.add('optimized')
1904 if opts['show_stage'] and opts['optimize']:
1927 if opts['show_stage'] and opts['optimize']:
1905 raise error.Abort(_('cannot use --optimize with --show-stage'))
1928 raise error.Abort(_('cannot use --optimize with --show-stage'))
1906 if opts['show_stage'] == ['all']:
1929 if opts['show_stage'] == ['all']:
1907 showalways.update(stagenames)
1930 showalways.update(stagenames)
1908 else:
1931 else:
1909 for n in opts['show_stage']:
1932 for n in opts['show_stage']:
1910 if n not in stagenames:
1933 if n not in stagenames:
1911 raise error.Abort(_('invalid stage name: %s') % n)
1934 raise error.Abort(_('invalid stage name: %s') % n)
1912 showalways.update(opts['show_stage'])
1935 showalways.update(opts['show_stage'])
1913
1936
1914 treebystage = {}
1937 treebystage = {}
1915 printedtree = None
1938 printedtree = None
1916 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1939 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1917 for n, f in stages:
1940 for n, f in stages:
1918 treebystage[n] = tree = f(tree)
1941 treebystage[n] = tree = f(tree)
1919 if n in showalways or (n in showchanged and tree != printedtree):
1942 if n in showalways or (n in showchanged and tree != printedtree):
1920 if opts['show_stage'] or n != 'parsed':
1943 if opts['show_stage'] or n != 'parsed':
1921 ui.write(("* %s:\n") % n)
1944 ui.write(("* %s:\n") % n)
1922 ui.write(revsetlang.prettyformat(tree), "\n")
1945 ui.write(revsetlang.prettyformat(tree), "\n")
1923 printedtree = tree
1946 printedtree = tree
1924
1947
1925 if opts['verify_optimized']:
1948 if opts['verify_optimized']:
1926 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1949 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1927 brevs = revset.makematcher(treebystage['optimized'])(repo)
1950 brevs = revset.makematcher(treebystage['optimized'])(repo)
1928 if ui.verbose:
1951 if ui.verbose:
1929 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1952 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1930 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1953 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1931 arevs = list(arevs)
1954 arevs = list(arevs)
1932 brevs = list(brevs)
1955 brevs = list(brevs)
1933 if arevs == brevs:
1956 if arevs == brevs:
1934 return 0
1957 return 0
1935 ui.write(('--- analyzed\n'), label='diff.file_a')
1958 ui.write(('--- analyzed\n'), label='diff.file_a')
1936 ui.write(('+++ optimized\n'), label='diff.file_b')
1959 ui.write(('+++ optimized\n'), label='diff.file_b')
1937 sm = difflib.SequenceMatcher(None, arevs, brevs)
1960 sm = difflib.SequenceMatcher(None, arevs, brevs)
1938 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1961 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1939 if tag in ('delete', 'replace'):
1962 if tag in ('delete', 'replace'):
1940 for c in arevs[alo:ahi]:
1963 for c in arevs[alo:ahi]:
1941 ui.write('-%s\n' % c, label='diff.deleted')
1964 ui.write('-%s\n' % c, label='diff.deleted')
1942 if tag in ('insert', 'replace'):
1965 if tag in ('insert', 'replace'):
1943 for c in brevs[blo:bhi]:
1966 for c in brevs[blo:bhi]:
1944 ui.write('+%s\n' % c, label='diff.inserted')
1967 ui.write('+%s\n' % c, label='diff.inserted')
1945 if tag == 'equal':
1968 if tag == 'equal':
1946 for c in arevs[alo:ahi]:
1969 for c in arevs[alo:ahi]:
1947 ui.write(' %s\n' % c)
1970 ui.write(' %s\n' % c)
1948 return 1
1971 return 1
1949
1972
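# Editorial sketch, not part of this patch: the --verify-optimized output
# above is driven by difflib.SequenceMatcher opcodes. A standalone run over
# two small revision lists shows how entries are classified:
import difflib
sm = difflib.SequenceMatcher(None, [0, 1, 2, 4], [0, 2, 3, 4])
tags = [op[0] for op in sm.get_opcodes()]
assert 'delete' in tags and 'insert' in tags and 'equal' in tags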
1950 func = revset.makematcher(tree)
1973 func = revset.makematcher(tree)
1951 revs = func(repo)
1974 revs = func(repo)
1952 if ui.verbose:
1975 if ui.verbose:
1953 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1976 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1954 for c in revs:
1977 for c in revs:
1955 ui.write("%s\n" % c)
1978 ui.write("%s\n" % c)
1956
1979
1957 @command('debugsetparents', [], _('REV1 [REV2]'))
1980 @command('debugsetparents', [], _('REV1 [REV2]'))
1958 def debugsetparents(ui, repo, rev1, rev2=None):
1981 def debugsetparents(ui, repo, rev1, rev2=None):
1959 """manually set the parents of the current working directory
1982 """manually set the parents of the current working directory
1960
1983
1961 This is useful for writing repository conversion tools, but should
1984 This is useful for writing repository conversion tools, but should
1962 be used with care. For example, neither the working directory nor the
1985 be used with care. For example, neither the working directory nor the
1963 dirstate is updated, so file status may be incorrect after running this
1986 dirstate is updated, so file status may be incorrect after running this
1964 command.
1987 command.
1965
1988
1966 Returns 0 on success.
1989 Returns 0 on success.
1967 """
1990 """
1968
1991
1969 r1 = scmutil.revsingle(repo, rev1).node()
1992 r1 = scmutil.revsingle(repo, rev1).node()
1970 r2 = scmutil.revsingle(repo, rev2, 'null').node()
1993 r2 = scmutil.revsingle(repo, rev2, 'null').node()
1971
1994
1972 with repo.wlock():
1995 with repo.wlock():
1973 repo.setparents(r1, r2)
1996 repo.setparents(r1, r2)
1974
1997
1975 @command('debugsub',
1998 @command('debugsub',
1976 [('r', 'rev', '',
1999 [('r', 'rev', '',
1977 _('revision to check'), _('REV'))],
2000 _('revision to check'), _('REV'))],
1978 _('[-r REV] [REV]'))
2001 _('[-r REV] [REV]'))
1979 def debugsub(ui, repo, rev=None):
2002 def debugsub(ui, repo, rev=None):
1980 ctx = scmutil.revsingle(repo, rev, None)
2003 ctx = scmutil.revsingle(repo, rev, None)
1981 for k, v in sorted(ctx.substate.items()):
2004 for k, v in sorted(ctx.substate.items()):
1982 ui.write(('path %s\n') % k)
2005 ui.write(('path %s\n') % k)
1983 ui.write((' source %s\n') % v[0])
2006 ui.write((' source %s\n') % v[0])
1984 ui.write((' revision %s\n') % v[1])
2007 ui.write((' revision %s\n') % v[1])
1985
2008
1986 @command('debugsuccessorssets',
2009 @command('debugsuccessorssets',
1987 [],
2010 [],
1988 _('[REV]'))
2011 _('[REV]'))
1989 def debugsuccessorssets(ui, repo, *revs):
2012 def debugsuccessorssets(ui, repo, *revs):
1990 """show set of successors for revision
2013 """show set of successors for revision
1991
2014
1992 A successors set of changeset A is a consistent group of revisions that
2015 A successors set of changeset A is a consistent group of revisions that
1993 succeed A. It contains non-obsolete changesets only.
2016 succeed A. It contains non-obsolete changesets only.
1994
2017
1995 In most cases a changeset A has a single successors set containing a single
2018 In most cases a changeset A has a single successors set containing a single
1996 successor (changeset A replaced by A').
2019 successor (changeset A replaced by A').
1997
2020
1998 A changeset that is made obsolete with no successors is called "pruned".
2021 A changeset that is made obsolete with no successors is called "pruned".
1999 Such changesets have no successors sets at all.
2022 Such changesets have no successors sets at all.
2000
2023
2001 A changeset that has been "split" will have a successors set containing
2024 A changeset that has been "split" will have a successors set containing
2002 more than one successor.
2025 more than one successor.
2003
2026
2004 A changeset that has been rewritten in multiple different ways is called
2027 A changeset that has been rewritten in multiple different ways is called
2005 "divergent". Such changesets have multiple successor sets (each of which
2028 "divergent". Such changesets have multiple successor sets (each of which
2006 may also be split, i.e. have multiple successors).
2029 may also be split, i.e. have multiple successors).
2007
2030
2008 Results are displayed as follows::
2031 Results are displayed as follows::
2009
2032
2010 <rev1>
2033 <rev1>
2011 <successors-1A>
2034 <successors-1A>
2012 <rev2>
2035 <rev2>
2013 <successors-2A>
2036 <successors-2A>
2014 <successors-2B1> <successors-2B2> <successors-2B3>
2037 <successors-2B1> <successors-2B2> <successors-2B3>
2015
2038
2016 Here rev2 has two possible (i.e. divergent) successors sets. The first
2039 Here rev2 has two possible (i.e. divergent) successors sets. The first
2017 holds one element, whereas the second holds three (i.e. the changeset has
2040 holds one element, whereas the second holds three (i.e. the changeset has
2018 been split).
2041 been split).
2019 """
2042 """
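# Editorial sketch, not part of this patch (hypothetical hashes): with default
# verbosity the header line is str(ctx), i.e. "rev:shortnode", and each
# successors set follows on its own indented line of short nodes, e.g.
#
#     3:a1b2c3d4e5f6
#         0f0f0f0f0f0f
#         111111111111 222222222222
#
# meaning revision 3 is divergent: one successors set with a single successor,
# and a second set in which it was also split into two successors.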
2020 # passed to successorssets caching computation from one call to another
2043 # passed to successorssets caching computation from one call to another
2021 cache = {}
2044 cache = {}
2022 ctx2str = str
2045 ctx2str = str
2023 node2str = short
2046 node2str = short
2024 if ui.debug():
2047 if ui.debug():
2025 def ctx2str(ctx):
2048 def ctx2str(ctx):
2026 return ctx.hex()
2049 return ctx.hex()
2027 node2str = hex
2050 node2str = hex
2028 for rev in scmutil.revrange(repo, revs):
2051 for rev in scmutil.revrange(repo, revs):
2029 ctx = repo[rev]
2052 ctx = repo[rev]
2030 ui.write('%s\n'% ctx2str(ctx))
2053 ui.write('%s\n'% ctx2str(ctx))
2031 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2054 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2032 if succsset:
2055 if succsset:
2033 ui.write(' ')
2056 ui.write(' ')
2034 ui.write(node2str(succsset[0]))
2057 ui.write(node2str(succsset[0]))
2035 for node in succsset[1:]:
2058 for node in succsset[1:]:
2036 ui.write(' ')
2059 ui.write(' ')
2037 ui.write(node2str(node))
2060 ui.write(node2str(node))
2038 ui.write('\n')
2061 ui.write('\n')
2039
2062
2040 @command('debugtemplate',
2063 @command('debugtemplate',
2041 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2064 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2042 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2065 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2043 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2066 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2044 optionalrepo=True)
2067 optionalrepo=True)
2045 def debugtemplate(ui, repo, tmpl, **opts):
2068 def debugtemplate(ui, repo, tmpl, **opts):
2046 """parse and apply a template
2069 """parse and apply a template
2047
2070
2048 If -r/--rev is given, the template is processed as a log template and
2071 If -r/--rev is given, the template is processed as a log template and
2049 applied to the given changesets. Otherwise, it is processed as a generic
2072 applied to the given changesets. Otherwise, it is processed as a generic
2050 template.
2073 template.
2051
2074
2052 Use --verbose to print the parsed tree.
2075 Use --verbose to print the parsed tree.
2053 """
2076 """
2054 revs = None
2077 revs = None
2055 if opts['rev']:
2078 if opts['rev']:
2056 if repo is None:
2079 if repo is None:
2057 raise error.RepoError(_('there is no Mercurial repository here '
2080 raise error.RepoError(_('there is no Mercurial repository here '
2058 '(.hg not found)'))
2081 '(.hg not found)'))
2059 revs = scmutil.revrange(repo, opts['rev'])
2082 revs = scmutil.revrange(repo, opts['rev'])
2060
2083
2061 props = {}
2084 props = {}
2062 for d in opts['define']:
2085 for d in opts['define']:
2063 try:
2086 try:
2064 k, v = (e.strip() for e in d.split('=', 1))
2087 k, v = (e.strip() for e in d.split('=', 1))
2065 if not k or k == 'ui':
2088 if not k or k == 'ui':
2066 raise ValueError
2089 raise ValueError
2067 props[k] = v
2090 props[k] = v
2068 except ValueError:
2091 except ValueError:
2069 raise error.Abort(_('malformed keyword definition: %s') % d)
2092 raise error.Abort(_('malformed keyword definition: %s') % d)
2070
2093
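# Editorial sketch, not part of this patch: the -D/--define parsing above
# splits on the first '=' and strips whitespace; a definition with no '=' (or
# with the reserved key 'ui') is rejected via the ValueError path.
d = 'author = alice'
k, v = (e.strip() for e in d.split('=', 1))
assert (k, v) == ('author', 'alice')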
2071 if ui.verbose:
2094 if ui.verbose:
2072 aliases = ui.configitems('templatealias')
2095 aliases = ui.configitems('templatealias')
2073 tree = templater.parse(tmpl)
2096 tree = templater.parse(tmpl)
2074 ui.note(templater.prettyformat(tree), '\n')
2097 ui.note(templater.prettyformat(tree), '\n')
2075 newtree = templater.expandaliases(tree, aliases)
2098 newtree = templater.expandaliases(tree, aliases)
2076 if newtree != tree:
2099 if newtree != tree:
2077 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2100 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2078
2101
2079 mapfile = None
2102 mapfile = None
2080 if revs is None:
2103 if revs is None:
2081 k = 'debugtemplate'
2104 k = 'debugtemplate'
2082 t = formatter.maketemplater(ui, k, tmpl)
2105 t = formatter.maketemplater(ui, k, tmpl)
2083 ui.write(templater.stringify(t(k, ui=ui, **props)))
2106 ui.write(templater.stringify(t(k, ui=ui, **props)))
2084 else:
2107 else:
2085 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2108 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2086 mapfile, buffered=False)
2109 mapfile, buffered=False)
2087 for r in revs:
2110 for r in revs:
2088 displayer.show(repo[r], **props)
2111 displayer.show(repo[r], **props)
2089 displayer.close()
2112 displayer.close()
2090
2113
2091 @command('debugupdatecaches', [])
2114 @command('debugupdatecaches', [])
2092 def debugupdatecaches(ui, repo, *pats, **opts):
2115 def debugupdatecaches(ui, repo, *pats, **opts):
2093 """warm all known caches in the repository"""
2116 """warm all known caches in the repository"""
2094 with repo.wlock():
2117 with repo.wlock():
2095 with repo.lock():
2118 with repo.lock():
2096 repo.updatecaches()
2119 repo.updatecaches()
2097
2120
2098 @command('debugupgraderepo', [
2121 @command('debugupgraderepo', [
2099 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2122 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2100 ('', 'run', False, _('performs an upgrade')),
2123 ('', 'run', False, _('performs an upgrade')),
2101 ])
2124 ])
2102 def debugupgraderepo(ui, repo, run=False, optimize=None):
2125 def debugupgraderepo(ui, repo, run=False, optimize=None):
2103 """upgrade a repository to use different features
2126 """upgrade a repository to use different features
2104
2127
2105 If no arguments are specified, the repository is evaluated for upgrade
2128 If no arguments are specified, the repository is evaluated for upgrade
2106 and a list of problems and potential optimizations is printed.
2129 and a list of problems and potential optimizations is printed.
2107
2130
2108 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2131 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2109 can be influenced via additional arguments. More details will be provided
2132 can be influenced via additional arguments. More details will be provided
2110 by the command output when run without ``--run``.
2133 by the command output when run without ``--run``.
2111
2134
2112 During the upgrade, the repository will be locked and no writes will be
2135 During the upgrade, the repository will be locked and no writes will be
2113 allowed.
2136 allowed.
2114
2137
2115 At the end of the upgrade, the repository may not be readable while new
2138 At the end of the upgrade, the repository may not be readable while new
2116 repository data is swapped in. This window will be as long as it takes to
2139 repository data is swapped in. This window will be as long as it takes to
2117 rename some directories inside the ``.hg`` directory. On most machines, this
2140 rename some directories inside the ``.hg`` directory. On most machines, this
2118 should complete almost instantaneously and the chances of a consumer being
2141 should complete almost instantaneously and the chances of a consumer being
2119 unable to access the repository should be low.
2142 unable to access the repository should be low.
2120 """
2143 """
2121 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2144 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2122
2145
2123 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2146 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2124 inferrepo=True)
2147 inferrepo=True)
2125 def debugwalk(ui, repo, *pats, **opts):
2148 def debugwalk(ui, repo, *pats, **opts):
2126 """show how files match on given patterns"""
2149 """show how files match on given patterns"""
2127 m = scmutil.match(repo[None], pats, opts)
2150 m = scmutil.match(repo[None], pats, opts)
2128 ui.write(('matcher: %r\n' % m))
2151 ui.write(('matcher: %r\n' % m))
2129 items = list(repo[None].walk(m))
2152 items = list(repo[None].walk(m))
2130 if not items:
2153 if not items:
2131 return
2154 return
2132 f = lambda fn: fn
2155 f = lambda fn: fn
2133 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2156 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2134 f = lambda fn: util.normpath(fn)
2157 f = lambda fn: util.normpath(fn)
2135 fmt = 'f %%-%ds %%-%ds %%s' % (
2158 fmt = 'f %%-%ds %%-%ds %%s' % (
2136 max([len(abs) for abs in items]),
2159 max([len(abs) for abs in items]),
2137 max([len(m.rel(abs)) for abs in items]))
2160 max([len(m.rel(abs)) for abs in items]))
2138 for abs in items:
2161 for abs in items:
2139 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2162 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2140 ui.write("%s\n" % line.rstrip())
2163 ui.write("%s\n" % line.rstrip())
2141
2164
2142 @command('debugwireargs',
2165 @command('debugwireargs',
2143 [('', 'three', '', 'three'),
2166 [('', 'three', '', 'three'),
2144 ('', 'four', '', 'four'),
2167 ('', 'four', '', 'four'),
2145 ('', 'five', '', 'five'),
2168 ('', 'five', '', 'five'),
2146 ] + cmdutil.remoteopts,
2169 ] + cmdutil.remoteopts,
2147 _('REPO [OPTIONS]... [ONE [TWO]]'),
2170 _('REPO [OPTIONS]... [ONE [TWO]]'),
2148 norepo=True)
2171 norepo=True)
2149 def debugwireargs(ui, repopath, *vals, **opts):
2172 def debugwireargs(ui, repopath, *vals, **opts):
2150 repo = hg.peer(ui, opts, repopath)
2173 repo = hg.peer(ui, opts, repopath)
2151 for opt in cmdutil.remoteopts:
2174 for opt in cmdutil.remoteopts:
2152 del opts[opt[1]]
2175 del opts[opt[1]]
2153 args = {}
2176 args = {}
2154 for k, v in opts.iteritems():
2177 for k, v in opts.iteritems():
2155 if v:
2178 if v:
2156 args[k] = v
2179 args[k] = v
2157 # run twice to check that we don't mess up the stream for the next command
2180 # run twice to check that we don't mess up the stream for the next command
2158 res1 = repo.debugwireargs(*vals, **args)
2181 res1 = repo.debugwireargs(*vals, **args)
2159 res2 = repo.debugwireargs(*vals, **args)
2182 res2 = repo.debugwireargs(*vals, **args)
2160 ui.write("%s\n" % res1)
2183 ui.write("%s\n" % res1)
2161 if res1 != res2:
2184 if res1 != res2:
2162 ui.warn("%s\n" % res2)
2185 ui.warn("%s\n" % res2)
@@ -1,169 +1,171 b''
1 Test changesets filtering during exchanges (some tests are still in
1 Test changesets filtering during exchanges (some tests are still in
2 test-obsolete.t)
2 test-obsolete.t)
3
3
4 $ cat >> $HGRCPATH << EOF
4 $ cat >> $HGRCPATH << EOF
5 > [experimental]
5 > [experimental]
6 > evolution=createmarkers
6 > evolution=createmarkers
7 > EOF
7 > EOF
8
8
9 Push does not corrupt remote
9 Push does not corrupt remote
10 ----------------------------
10 ----------------------------
11
11
12 Create a DAG where a changeset reuses a revision from a file first used in an
12 Create a DAG where a changeset reuses a revision from a file first used in an
13 extinct changeset.
13 extinct changeset.
14
14
15 $ hg init local
15 $ hg init local
16 $ cd local
16 $ cd local
17 $ echo 'base' > base
17 $ echo 'base' > base
18 $ hg commit -Am base
18 $ hg commit -Am base
19 adding base
19 adding base
20 $ echo 'A' > A
20 $ echo 'A' > A
21 $ hg commit -Am A
21 $ hg commit -Am A
22 adding A
22 adding A
23 $ hg up 0
23 $ hg up 0
24 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
24 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
25 $ hg revert -ar 1
25 $ hg revert -ar 1
26 adding A
26 adding A
27 $ hg commit -Am "A'"
27 $ hg commit -Am "A'"
28 created new head
28 created new head
29 $ hg log -G --template='{desc} {node}'
29 $ hg log -G --template='{desc} {node}'
30 @ A' f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
30 @ A' f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
31 |
31 |
32 | o A 9d73aac1b2ed7d53835eaeec212ed41ea47da53a
32 | o A 9d73aac1b2ed7d53835eaeec212ed41ea47da53a
33 |/
33 |/
34 o base d20a80d4def38df63a4b330b7fb688f3d4cae1e3
34 o base d20a80d4def38df63a4b330b7fb688f3d4cae1e3
35
35
36 $ hg debugobsolete 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
36 $ hg debugobsolete 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
37
37
38 Push it. The bundle should not refer to the extinct changeset.
38 Push it. The bundle should not refer to the extinct changeset.
39
39
40 $ hg init ../other
40 $ hg init ../other
41 $ hg push ../other
41 $ hg push ../other
42 pushing to ../other
42 pushing to ../other
43 searching for changes
43 searching for changes
44 adding changesets
44 adding changesets
45 adding manifests
45 adding manifests
46 adding file changes
46 adding file changes
47 added 2 changesets with 2 changes to 2 files
47 added 2 changesets with 2 changes to 2 files
48 $ hg -R ../other verify
48 $ hg -R ../other verify
49 checking changesets
49 checking changesets
50 checking manifests
50 checking manifests
51 crosschecking files in changesets and manifests
51 crosschecking files in changesets and manifests
52 checking files
52 checking files
53 2 files, 2 changesets, 2 total revisions
53 2 files, 2 changesets, 2 total revisions
54
54
55 Adding a changeset going extinct locally
55 Adding a changeset going extinct locally
56 ------------------------------------------
56 ------------------------------------------
57
57
58 Pull a changeset that will immediately go extinct (because you already have a
58 Pull a changeset that will immediately go extinct (because you already have a
59 marker that obsoletes it)
59 marker that obsoletes it)
60 (test resolution of issue3788)
60 (test resolution of issue3788)
61
61
62 $ hg phase --draft --force f89bcc95eba5
62 $ hg phase --draft --force f89bcc95eba5
63 $ hg phase -R ../other --draft --force f89bcc95eba5
63 $ hg phase -R ../other --draft --force f89bcc95eba5
64 $ hg commit --amend -m "A''"
64 $ hg commit --amend -m "A''"
65 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
65 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
66 $ hg pull ../other
66 $ hg pull ../other
67 pulling from ../other
67 pulling from ../other
68 searching for changes
68 searching for changes
69 adding changesets
69 adding changesets
70 adding manifests
70 adding manifests
71 adding file changes
71 adding file changes
72 added 1 changesets with 0 changes to 1 files (+1 heads)
72 added 1 changesets with 0 changes to 1 files (+1 heads)
73 (run 'hg heads' to see heads, 'hg merge' to merge)
73 (run 'hg heads' to see heads, 'hg merge' to merge)
74
74
75 check that bundle is not affected
75 check that bundle is not affected
76
76
77 $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5.hg
77 $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5.hg
78 1 changesets found
78 1 changesets found
79 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
79 $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5
80 $ hg unbundle ../f89bcc95eba5.hg
80 $ hg unbundle ../f89bcc95eba5.hg
81 adding changesets
81 adding changesets
82 adding manifests
82 adding manifests
83 adding file changes
83 adding file changes
84 added 1 changesets with 0 changes to 1 files (+1 heads)
84 added 1 changesets with 0 changes to 1 files (+1 heads)
85 (run 'hg heads' to see heads)
85 (run 'hg heads' to see heads)
86
86
87 check that the bundle can contain markers:
87 check that the bundle can contain markers:
88
88
89 $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5-obs.hg --config experimental.evolution.bundle-obsmarker=1
89 $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5-obs.hg --config experimental.evolution.bundle-obsmarker=1
90 1 changesets found
90 1 changesets found
91 $ hg debugbundle ../f89bcc95eba5.hg
91 $ hg debugbundle ../f89bcc95eba5.hg
92 Stream params: sortdict([('Compression', 'BZ')])
92 Stream params: sortdict([('Compression', 'BZ')])
93 changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
93 changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
94 f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
94 f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
95 $ hg debugbundle ../f89bcc95eba5-obs.hg
95 $ hg debugbundle ../f89bcc95eba5-obs.hg
96 Stream params: sortdict([('Compression', 'BZ')])
96 Stream params: sortdict([('Compression', 'BZ')])
97 changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
97 changegroup -- "sortdict([('version', '02'), ('nbchanges', '1')])"
98 f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
98 f89bcc95eba5174b1ccc3e33a82e84c96e8338ee
99 obsmarkers -- 'sortdict()'
99 obsmarkers -- 'sortdict()'
100 version: 1 (70 bytes)
101 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'}
100
102
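The two indented lines above ("version: 1 ..." and the marker itself) are the
new output this change adds: hg debugbundle now prints the content of
obsmarkers parts rather than only the part name. A minimal sketch of how such
a marker line can be assembled from its visible fields (the helper below is
hypothetical, for illustration only, and is not Mercurial's actual formatting
code):

    import time

    def render_marker(prec, succs, flags, date, meta):
        # field order follows the displayed line: precursor, successors,
        # flags, date, metadata
        when = time.strftime('%a %b %d %H:%M:%S %Y +0000', time.gmtime(date))
        metastr = '{%s}' % ', '.join("'%s': '%s'" % kv
                                     for kv in sorted(meta.items()))
        return '%s %s %d (%s) %s' % (prec, ' '.join(succs), flags, when,
                                     metastr)

    print(render_marker('9d73aac1b2ed7d53835eaeec212ed41ea47da53a',
                        ['f89bcc95eba5174b1ccc3e33a82e84c96e8338ee'],
                        0, 0, {'user': 'test'}))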
101 $ cd ..
103 $ cd ..
102
104
103 pull does not fetch excessive changesets when common node is hidden (issue4982)
105 pull does not fetch excessive changesets when common node is hidden (issue4982)
104 -------------------------------------------------------------------------------
106 -------------------------------------------------------------------------------
105
107
106 initial repo with server and client matching
108 initial repo with server and client matching
107
109
108 $ hg init pull-hidden-common
110 $ hg init pull-hidden-common
109 $ cd pull-hidden-common
111 $ cd pull-hidden-common
110 $ touch foo
112 $ touch foo
111 $ hg -q commit -A -m initial
113 $ hg -q commit -A -m initial
112 $ echo 1 > foo
114 $ echo 1 > foo
113 $ hg commit -m 1
115 $ hg commit -m 1
114 $ echo 2a > foo
116 $ echo 2a > foo
115 $ hg commit -m 2a
117 $ hg commit -m 2a
116 $ cd ..
118 $ cd ..
117 $ hg clone --pull pull-hidden-common pull-hidden-common-client
119 $ hg clone --pull pull-hidden-common pull-hidden-common-client
118 requesting all changes
120 requesting all changes
119 adding changesets
121 adding changesets
120 adding manifests
122 adding manifests
121 adding file changes
123 adding file changes
122 added 3 changesets with 3 changes to 1 files
124 added 3 changesets with 3 changes to 1 files
123 updating to branch default
125 updating to branch default
124 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
126 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
125
127
126 server obsoletes the old head
128 server obsoletes the old head
127
129
128 $ cd pull-hidden-common
130 $ cd pull-hidden-common
129 $ hg -q up -r 1
131 $ hg -q up -r 1
130 $ echo 2b > foo
132 $ echo 2b > foo
131 $ hg -q commit -m 2b
133 $ hg -q commit -m 2b
132 $ hg debugobsolete 6a29ed9c68defff1a139e5c6fa9696fb1a75783d bec0734cd68e84477ba7fc1d13e6cff53ab70129
134 $ hg debugobsolete 6a29ed9c68defff1a139e5c6fa9696fb1a75783d bec0734cd68e84477ba7fc1d13e6cff53ab70129
133 $ cd ..
135 $ cd ..
134
136
135 client only pulls down 1 changeset
137 client only pulls down 1 changeset
136
138
137 $ cd pull-hidden-common-client
139 $ cd pull-hidden-common-client
138 $ hg pull --debug
140 $ hg pull --debug
139 pulling from $TESTTMP/pull-hidden-common (glob)
141 pulling from $TESTTMP/pull-hidden-common (glob)
140 query 1; heads
142 query 1; heads
141 searching for changes
143 searching for changes
142 taking quick initial sample
144 taking quick initial sample
143 query 2; still undecided: 2, sample size is: 2
145 query 2; still undecided: 2, sample size is: 2
144 2 total queries
146 2 total queries
145 1 changesets found
147 1 changesets found
146 list of changesets:
148 list of changesets:
147 bec0734cd68e84477ba7fc1d13e6cff53ab70129
149 bec0734cd68e84477ba7fc1d13e6cff53ab70129
148 listing keys for "phases"
150 listing keys for "phases"
149 listing keys for "bookmarks"
151 listing keys for "bookmarks"
150 bundle2-output-bundle: "HG20", 3 parts total
152 bundle2-output-bundle: "HG20", 3 parts total
151 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
153 bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
152 bundle2-output-part: "listkeys" (params: 1 mandatory) 58 bytes payload
154 bundle2-output-part: "listkeys" (params: 1 mandatory) 58 bytes payload
153 bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
155 bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
154 bundle2-input-bundle: with-transaction
156 bundle2-input-bundle: with-transaction
155 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
157 bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
156 adding changesets
158 adding changesets
157 add changeset bec0734cd68e
159 add changeset bec0734cd68e
158 adding manifests
160 adding manifests
159 adding file changes
161 adding file changes
160 adding foo revisions
162 adding foo revisions
161 added 1 changesets with 1 changes to 1 files (+1 heads)
163 added 1 changesets with 1 changes to 1 files (+1 heads)
162 bundle2-input-part: total payload size 476
164 bundle2-input-part: total payload size 476
163 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
165 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
164 bundle2-input-part: total payload size 58
166 bundle2-input-part: total payload size 58
165 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
167 bundle2-input-part: "listkeys" (params: 1 mandatory) supported
166 bundle2-input-bundle: 2 parts total
168 bundle2-input-bundle: 2 parts total
167 checking for updated bookmarks
169 checking for updated bookmarks
168 updating the branch cache
170 updating the branch cache
169 (run 'hg heads' to see heads, 'hg merge' to merge)
171 (run 'hg heads' to see heads, 'hg merge' to merge)