debugignore: eliminate inconsistencies with `hg status` (issue5222)...
Matt Harbison
r33507:e9672de5 default
@@ -1,2310 +1,2311 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import ssl
16 import ssl
17 import string
17 import string
18 import sys
18 import sys
19 import tempfile
19 import tempfile
20 import time
20 import time
21
21
22 from .i18n import _
22 from .i18n import _
23 from .node import (
23 from .node import (
24 bin,
24 bin,
25 hex,
25 hex,
26 nullhex,
26 nullhex,
27 nullid,
27 nullid,
28 nullrev,
28 nullrev,
29 short,
29 short,
30 )
30 )
31 from . import (
31 from . import (
32 bundle2,
32 bundle2,
33 changegroup,
33 changegroup,
34 cmdutil,
34 cmdutil,
35 color,
35 color,
36 context,
36 context,
37 dagparser,
37 dagparser,
38 dagutil,
38 dagutil,
39 encoding,
39 encoding,
40 error,
40 error,
41 exchange,
41 exchange,
42 extensions,
42 extensions,
43 filemerge,
43 filemerge,
44 fileset,
44 fileset,
45 formatter,
45 formatter,
46 hg,
46 hg,
47 localrepo,
47 localrepo,
48 lock as lockmod,
48 lock as lockmod,
49 merge as mergemod,
49 merge as mergemod,
50 obsolete,
50 obsolete,
51 obsutil,
51 obsutil,
52 phases,
52 phases,
53 policy,
53 policy,
54 pvec,
54 pvec,
55 pycompat,
55 pycompat,
56 registrar,
56 registrar,
57 repair,
57 repair,
58 revlog,
58 revlog,
59 revset,
59 revset,
60 revsetlang,
60 revsetlang,
61 scmutil,
61 scmutil,
62 setdiscovery,
62 setdiscovery,
63 simplemerge,
63 simplemerge,
64 smartset,
64 smartset,
65 sslutil,
65 sslutil,
66 streamclone,
66 streamclone,
67 templater,
67 templater,
68 treediscovery,
68 treediscovery,
69 upgrade,
69 upgrade,
70 util,
70 util,
71 vfs as vfsmod,
71 vfs as vfsmod,
72 )
72 )
73
73
74 release = lockmod.release
74 release = lockmod.release
75
75
76 command = registrar.command()
76 command = registrar.command()
77
77
78 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
78 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
79 def debugancestor(ui, repo, *args):
79 def debugancestor(ui, repo, *args):
80 """find the ancestor revision of two revisions in a given index"""
80 """find the ancestor revision of two revisions in a given index"""
81 if len(args) == 3:
81 if len(args) == 3:
82 index, rev1, rev2 = args
82 index, rev1, rev2 = args
83 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
83 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
84 lookup = r.lookup
84 lookup = r.lookup
85 elif len(args) == 2:
85 elif len(args) == 2:
86 if not repo:
86 if not repo:
87 raise error.Abort(_('there is no Mercurial repository here '
87 raise error.Abort(_('there is no Mercurial repository here '
88 '(.hg not found)'))
88 '(.hg not found)'))
89 rev1, rev2 = args
89 rev1, rev2 = args
90 r = repo.changelog
90 r = repo.changelog
91 lookup = repo.lookup
91 lookup = repo.lookup
92 else:
92 else:
93 raise error.Abort(_('either two or three arguments required'))
93 raise error.Abort(_('either two or three arguments required'))
94 a = r.ancestor(lookup(rev1), lookup(rev2))
94 a = r.ancestor(lookup(rev1), lookup(rev2))
95 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
95 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
96
96
97 @command('debugapplystreamclonebundle', [], 'FILE')
97 @command('debugapplystreamclonebundle', [], 'FILE')
98 def debugapplystreamclonebundle(ui, repo, fname):
98 def debugapplystreamclonebundle(ui, repo, fname):
99 """apply a stream clone bundle file"""
99 """apply a stream clone bundle file"""
100 f = hg.openpath(ui, fname)
100 f = hg.openpath(ui, fname)
101 gen = exchange.readbundle(ui, f, fname)
101 gen = exchange.readbundle(ui, f, fname)
102 gen.apply(repo)
102 gen.apply(repo)
103
103
104 @command('debugbuilddag',
104 @command('debugbuilddag',
105 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
105 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
106 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
106 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
107 ('n', 'new-file', None, _('add new file at each rev'))],
107 ('n', 'new-file', None, _('add new file at each rev'))],
108 _('[OPTION]... [TEXT]'))
108 _('[OPTION]... [TEXT]'))
109 def debugbuilddag(ui, repo, text=None,
109 def debugbuilddag(ui, repo, text=None,
110 mergeable_file=False,
110 mergeable_file=False,
111 overwritten_file=False,
111 overwritten_file=False,
112 new_file=False):
112 new_file=False):
113 """builds a repo with a given DAG from scratch in the current empty repo
113 """builds a repo with a given DAG from scratch in the current empty repo
114
114
115 The description of the DAG is read from stdin if not given on the
115 The description of the DAG is read from stdin if not given on the
116 command line.
116 command line.
117
117
118 Elements:
118 Elements:
119
119
120 - "+n" is a linear run of n nodes based on the current default parent
120 - "+n" is a linear run of n nodes based on the current default parent
121 - "." is a single node based on the current default parent
121 - "." is a single node based on the current default parent
122 - "$" resets the default parent to null (implied at the start);
122 - "$" resets the default parent to null (implied at the start);
123 otherwise the default parent is always the last node created
123 otherwise the default parent is always the last node created
124 - "<p" sets the default parent to the backref p
124 - "<p" sets the default parent to the backref p
125 - "*p" is a fork at parent p, which is a backref
125 - "*p" is a fork at parent p, which is a backref
126 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
126 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
127 - "/p2" is a merge of the preceding node and p2
127 - "/p2" is a merge of the preceding node and p2
128 - ":tag" defines a local tag for the preceding node
128 - ":tag" defines a local tag for the preceding node
129 - "@branch" sets the named branch for subsequent nodes
129 - "@branch" sets the named branch for subsequent nodes
130 - "#...\\n" is a comment up to the end of the line
130 - "#...\\n" is a comment up to the end of the line
131
131
132 Whitespace between the above elements is ignored.
132 Whitespace between the above elements is ignored.
133
133
134 A backref is either
134 A backref is either
135
135
136 - a number n, which references the node curr-n, where curr is the current
136 - a number n, which references the node curr-n, where curr is the current
137 node, or
137 node, or
138 - the name of a local tag you placed earlier using ":tag", or
138 - the name of a local tag you placed earlier using ":tag", or
139 - empty to denote the default parent.
139 - empty to denote the default parent.
140
140
141 All string valued-elements are either strictly alphanumeric, or must
141 All string valued-elements are either strictly alphanumeric, or must
142 be enclosed in double quotes ("..."), with "\\" as escape character.
142 be enclosed in double quotes ("..."), with "\\" as escape character.
143 """
143 """
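    # Illustrative example (not part of this changeset), built only from the
    # elements documented above: "+2" adds two linear nodes, ":x" tags the last
    # one, "+3 :y" adds three more and tags that head, "*x" forks a new node off
    # the node tagged "x", "+1" extends the fork, and "/y" merges it with the
    # node tagged "y":
    #
    #     hg debugbuilddag '+2 :x +3 :y *x +1 /y'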
144
144
145 if text is None:
145 if text is None:
146 ui.status(_("reading DAG from stdin\n"))
146 ui.status(_("reading DAG from stdin\n"))
147 text = ui.fin.read()
147 text = ui.fin.read()
148
148
149 cl = repo.changelog
149 cl = repo.changelog
150 if len(cl) > 0:
150 if len(cl) > 0:
151 raise error.Abort(_('repository is not empty'))
151 raise error.Abort(_('repository is not empty'))
152
152
153 # determine number of revs in DAG
153 # determine number of revs in DAG
154 total = 0
154 total = 0
155 for type, data in dagparser.parsedag(text):
155 for type, data in dagparser.parsedag(text):
156 if type == 'n':
156 if type == 'n':
157 total += 1
157 total += 1
158
158
159 if mergeable_file:
159 if mergeable_file:
160 linesperrev = 2
160 linesperrev = 2
161 # make a file with k lines per rev
161 # make a file with k lines per rev
162 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
162 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
163 initialmergedlines.append("")
163 initialmergedlines.append("")
164
164
165 tags = []
165 tags = []
166
166
167 wlock = lock = tr = None
167 wlock = lock = tr = None
168 try:
168 try:
169 wlock = repo.wlock()
169 wlock = repo.wlock()
170 lock = repo.lock()
170 lock = repo.lock()
171 tr = repo.transaction("builddag")
171 tr = repo.transaction("builddag")
172
172
173 at = -1
173 at = -1
174 atbranch = 'default'
174 atbranch = 'default'
175 nodeids = []
175 nodeids = []
176 id = 0
176 id = 0
177 ui.progress(_('building'), id, unit=_('revisions'), total=total)
177 ui.progress(_('building'), id, unit=_('revisions'), total=total)
178 for type, data in dagparser.parsedag(text):
178 for type, data in dagparser.parsedag(text):
179 if type == 'n':
179 if type == 'n':
180 ui.note(('node %s\n' % str(data)))
180 ui.note(('node %s\n' % str(data)))
181 id, ps = data
181 id, ps = data
182
182
183 files = []
183 files = []
184 fctxs = {}
184 fctxs = {}
185
185
186 p2 = None
186 p2 = None
187 if mergeable_file:
187 if mergeable_file:
188 fn = "mf"
188 fn = "mf"
189 p1 = repo[ps[0]]
189 p1 = repo[ps[0]]
190 if len(ps) > 1:
190 if len(ps) > 1:
191 p2 = repo[ps[1]]
191 p2 = repo[ps[1]]
192 pa = p1.ancestor(p2)
192 pa = p1.ancestor(p2)
193 base, local, other = [x[fn].data() for x in (pa, p1,
193 base, local, other = [x[fn].data() for x in (pa, p1,
194 p2)]
194 p2)]
195 m3 = simplemerge.Merge3Text(base, local, other)
195 m3 = simplemerge.Merge3Text(base, local, other)
196 ml = [l.strip() for l in m3.merge_lines()]
196 ml = [l.strip() for l in m3.merge_lines()]
197 ml.append("")
197 ml.append("")
198 elif at > 0:
198 elif at > 0:
199 ml = p1[fn].data().split("\n")
199 ml = p1[fn].data().split("\n")
200 else:
200 else:
201 ml = initialmergedlines
201 ml = initialmergedlines
202 ml[id * linesperrev] += " r%i" % id
202 ml[id * linesperrev] += " r%i" % id
203 mergedtext = "\n".join(ml)
203 mergedtext = "\n".join(ml)
204 files.append(fn)
204 files.append(fn)
205 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
205 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
206
206
207 if overwritten_file:
207 if overwritten_file:
208 fn = "of"
208 fn = "of"
209 files.append(fn)
209 files.append(fn)
210 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
210 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
211
211
212 if new_file:
212 if new_file:
213 fn = "nf%i" % id
213 fn = "nf%i" % id
214 files.append(fn)
214 files.append(fn)
215 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
215 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
216 if len(ps) > 1:
216 if len(ps) > 1:
217 if not p2:
217 if not p2:
218 p2 = repo[ps[1]]
218 p2 = repo[ps[1]]
219 for fn in p2:
219 for fn in p2:
220 if fn.startswith("nf"):
220 if fn.startswith("nf"):
221 files.append(fn)
221 files.append(fn)
222 fctxs[fn] = p2[fn]
222 fctxs[fn] = p2[fn]
223
223
224 def fctxfn(repo, cx, path):
224 def fctxfn(repo, cx, path):
225 return fctxs.get(path)
225 return fctxs.get(path)
226
226
227 if len(ps) == 0 or ps[0] < 0:
227 if len(ps) == 0 or ps[0] < 0:
228 pars = [None, None]
228 pars = [None, None]
229 elif len(ps) == 1:
229 elif len(ps) == 1:
230 pars = [nodeids[ps[0]], None]
230 pars = [nodeids[ps[0]], None]
231 else:
231 else:
232 pars = [nodeids[p] for p in ps]
232 pars = [nodeids[p] for p in ps]
233 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
233 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
234 date=(id, 0),
234 date=(id, 0),
235 user="debugbuilddag",
235 user="debugbuilddag",
236 extra={'branch': atbranch})
236 extra={'branch': atbranch})
237 nodeid = repo.commitctx(cx)
237 nodeid = repo.commitctx(cx)
238 nodeids.append(nodeid)
238 nodeids.append(nodeid)
239 at = id
239 at = id
240 elif type == 'l':
240 elif type == 'l':
241 id, name = data
241 id, name = data
242 ui.note(('tag %s\n' % name))
242 ui.note(('tag %s\n' % name))
243 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
243 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
244 elif type == 'a':
244 elif type == 'a':
245 ui.note(('branch %s\n' % data))
245 ui.note(('branch %s\n' % data))
246 atbranch = data
246 atbranch = data
247 ui.progress(_('building'), id, unit=_('revisions'), total=total)
247 ui.progress(_('building'), id, unit=_('revisions'), total=total)
248 tr.close()
248 tr.close()
249
249
250 if tags:
250 if tags:
251 repo.vfs.write("localtags", "".join(tags))
251 repo.vfs.write("localtags", "".join(tags))
252 finally:
252 finally:
253 ui.progress(_('building'), None)
253 ui.progress(_('building'), None)
254 release(tr, lock, wlock)
254 release(tr, lock, wlock)
255
255
256 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
256 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
257 indent_string = ' ' * indent
257 indent_string = ' ' * indent
258 if all:
258 if all:
259 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
259 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
260 % indent_string)
260 % indent_string)
261
261
262 def showchunks(named):
262 def showchunks(named):
263 ui.write("\n%s%s\n" % (indent_string, named))
263 ui.write("\n%s%s\n" % (indent_string, named))
264 chain = None
264 chain = None
265 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
265 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
266 node = chunkdata['node']
266 node = chunkdata['node']
267 p1 = chunkdata['p1']
267 p1 = chunkdata['p1']
268 p2 = chunkdata['p2']
268 p2 = chunkdata['p2']
269 cs = chunkdata['cs']
269 cs = chunkdata['cs']
270 deltabase = chunkdata['deltabase']
270 deltabase = chunkdata['deltabase']
271 delta = chunkdata['delta']
271 delta = chunkdata['delta']
272 ui.write("%s%s %s %s %s %s %s\n" %
272 ui.write("%s%s %s %s %s %s %s\n" %
273 (indent_string, hex(node), hex(p1), hex(p2),
273 (indent_string, hex(node), hex(p1), hex(p2),
274 hex(cs), hex(deltabase), len(delta)))
274 hex(cs), hex(deltabase), len(delta)))
275 chain = node
275 chain = node
276
276
277 chunkdata = gen.changelogheader()
277 chunkdata = gen.changelogheader()
278 showchunks("changelog")
278 showchunks("changelog")
279 chunkdata = gen.manifestheader()
279 chunkdata = gen.manifestheader()
280 showchunks("manifest")
280 showchunks("manifest")
281 for chunkdata in iter(gen.filelogheader, {}):
281 for chunkdata in iter(gen.filelogheader, {}):
282 fname = chunkdata['filename']
282 fname = chunkdata['filename']
283 showchunks(fname)
283 showchunks(fname)
284 else:
284 else:
285 if isinstance(gen, bundle2.unbundle20):
285 if isinstance(gen, bundle2.unbundle20):
286 raise error.Abort(_('use debugbundle2 for this file'))
286 raise error.Abort(_('use debugbundle2 for this file'))
287 chunkdata = gen.changelogheader()
287 chunkdata = gen.changelogheader()
288 chain = None
288 chain = None
289 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
289 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
290 node = chunkdata['node']
290 node = chunkdata['node']
291 ui.write("%s%s\n" % (indent_string, hex(node)))
291 ui.write("%s%s\n" % (indent_string, hex(node)))
292 chain = node
292 chain = node
293
293
294 def _debugobsmarkers(ui, part, indent=0, **opts):
294 def _debugobsmarkers(ui, part, indent=0, **opts):
295 """display version and markers contained in 'data'"""
295 """display version and markers contained in 'data'"""
296 opts = pycompat.byteskwargs(opts)
296 opts = pycompat.byteskwargs(opts)
297 data = part.read()
297 data = part.read()
298 indent_string = ' ' * indent
298 indent_string = ' ' * indent
299 try:
299 try:
300 version, markers = obsolete._readmarkers(data)
300 version, markers = obsolete._readmarkers(data)
301 except error.UnknownVersion as exc:
301 except error.UnknownVersion as exc:
302 msg = "%sunsupported version: %s (%d bytes)\n"
302 msg = "%sunsupported version: %s (%d bytes)\n"
303 msg %= indent_string, exc.version, len(data)
303 msg %= indent_string, exc.version, len(data)
304 ui.write(msg)
304 ui.write(msg)
305 else:
305 else:
306 msg = "%sversion: %s (%d bytes)\n"
306 msg = "%sversion: %s (%d bytes)\n"
307 msg %= indent_string, version, len(data)
307 msg %= indent_string, version, len(data)
308 ui.write(msg)
308 ui.write(msg)
309 fm = ui.formatter('debugobsolete', opts)
309 fm = ui.formatter('debugobsolete', opts)
310 for rawmarker in sorted(markers):
310 for rawmarker in sorted(markers):
311 m = obsutil.marker(None, rawmarker)
311 m = obsutil.marker(None, rawmarker)
312 fm.startitem()
312 fm.startitem()
313 fm.plain(indent_string)
313 fm.plain(indent_string)
314 cmdutil.showmarker(fm, m)
314 cmdutil.showmarker(fm, m)
315 fm.end()
315 fm.end()
316
316
317 def _debugphaseheads(ui, data, indent=0):
317 def _debugphaseheads(ui, data, indent=0):
318 """display version and markers contained in 'data'"""
318 """display version and markers contained in 'data'"""
319 indent_string = ' ' * indent
319 indent_string = ' ' * indent
320 headsbyphase = bundle2._readphaseheads(data)
320 headsbyphase = bundle2._readphaseheads(data)
321 for phase in phases.allphases:
321 for phase in phases.allphases:
322 for head in headsbyphase[phase]:
322 for head in headsbyphase[phase]:
323 ui.write(indent_string)
323 ui.write(indent_string)
324 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
324 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
325
325
326 def _debugbundle2(ui, gen, all=None, **opts):
326 def _debugbundle2(ui, gen, all=None, **opts):
327 """lists the contents of a bundle2"""
327 """lists the contents of a bundle2"""
328 if not isinstance(gen, bundle2.unbundle20):
328 if not isinstance(gen, bundle2.unbundle20):
329 raise error.Abort(_('not a bundle2 file'))
329 raise error.Abort(_('not a bundle2 file'))
330 ui.write(('Stream params: %s\n' % repr(gen.params)))
330 ui.write(('Stream params: %s\n' % repr(gen.params)))
331 parttypes = opts.get(r'part_type', [])
331 parttypes = opts.get(r'part_type', [])
332 for part in gen.iterparts():
332 for part in gen.iterparts():
333 if parttypes and part.type not in parttypes:
333 if parttypes and part.type not in parttypes:
334 continue
334 continue
335 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
335 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
336 if part.type == 'changegroup':
336 if part.type == 'changegroup':
337 version = part.params.get('version', '01')
337 version = part.params.get('version', '01')
338 cg = changegroup.getunbundler(version, part, 'UN')
338 cg = changegroup.getunbundler(version, part, 'UN')
339 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
339 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
340 if part.type == 'obsmarkers':
340 if part.type == 'obsmarkers':
341 _debugobsmarkers(ui, part, indent=4, **opts)
341 _debugobsmarkers(ui, part, indent=4, **opts)
342 if part.type == 'phase-heads':
342 if part.type == 'phase-heads':
343 _debugphaseheads(ui, part, indent=4)
343 _debugphaseheads(ui, part, indent=4)
344
344
345 @command('debugbundle',
345 @command('debugbundle',
346 [('a', 'all', None, _('show all details')),
346 [('a', 'all', None, _('show all details')),
347 ('', 'part-type', [], _('show only the named part type')),
347 ('', 'part-type', [], _('show only the named part type')),
348 ('', 'spec', None, _('print the bundlespec of the bundle'))],
348 ('', 'spec', None, _('print the bundlespec of the bundle'))],
349 _('FILE'),
349 _('FILE'),
350 norepo=True)
350 norepo=True)
351 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
351 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
352 """lists the contents of a bundle"""
352 """lists the contents of a bundle"""
353 with hg.openpath(ui, bundlepath) as f:
353 with hg.openpath(ui, bundlepath) as f:
354 if spec:
354 if spec:
355 spec = exchange.getbundlespec(ui, f)
355 spec = exchange.getbundlespec(ui, f)
356 ui.write('%s\n' % spec)
356 ui.write('%s\n' % spec)
357 return
357 return
358
358
359 gen = exchange.readbundle(ui, f, bundlepath)
359 gen = exchange.readbundle(ui, f, bundlepath)
360 if isinstance(gen, bundle2.unbundle20):
360 if isinstance(gen, bundle2.unbundle20):
361 return _debugbundle2(ui, gen, all=all, **opts)
361 return _debugbundle2(ui, gen, all=all, **opts)
362 _debugchangegroup(ui, gen, all=all, **opts)
362 _debugchangegroup(ui, gen, all=all, **opts)
363
363
364 @command('debugcheckstate', [], '')
364 @command('debugcheckstate', [], '')
365 def debugcheckstate(ui, repo):
365 def debugcheckstate(ui, repo):
366 """validate the correctness of the current dirstate"""
366 """validate the correctness of the current dirstate"""
367 parent1, parent2 = repo.dirstate.parents()
367 parent1, parent2 = repo.dirstate.parents()
368 m1 = repo[parent1].manifest()
368 m1 = repo[parent1].manifest()
369 m2 = repo[parent2].manifest()
369 m2 = repo[parent2].manifest()
370 errors = 0
370 errors = 0
371 for f in repo.dirstate:
371 for f in repo.dirstate:
372 state = repo.dirstate[f]
372 state = repo.dirstate[f]
373 if state in "nr" and f not in m1:
373 if state in "nr" and f not in m1:
374 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
374 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
375 errors += 1
375 errors += 1
376 if state in "a" and f in m1:
376 if state in "a" and f in m1:
377 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
377 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
378 errors += 1
378 errors += 1
379 if state in "m" and f not in m1 and f not in m2:
379 if state in "m" and f not in m1 and f not in m2:
380 ui.warn(_("%s in state %s, but not in either manifest\n") %
380 ui.warn(_("%s in state %s, but not in either manifest\n") %
381 (f, state))
381 (f, state))
382 errors += 1
382 errors += 1
383 for f in m1:
383 for f in m1:
384 state = repo.dirstate[f]
384 state = repo.dirstate[f]
385 if state not in "nrm":
385 if state not in "nrm":
386 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
386 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
387 errors += 1
387 errors += 1
388 if errors:
388 if errors:
389 error = _(".hg/dirstate inconsistent with current parent's manifest")
389 error = _(".hg/dirstate inconsistent with current parent's manifest")
390 raise error.Abort(error)
390 raise error.Abort(error)
391
391
392 @command('debugcolor',
392 @command('debugcolor',
393 [('', 'style', None, _('show all configured styles'))],
393 [('', 'style', None, _('show all configured styles'))],
394 'hg debugcolor')
394 'hg debugcolor')
395 def debugcolor(ui, repo, **opts):
395 def debugcolor(ui, repo, **opts):
396 """show available color, effects or style"""
396 """show available color, effects or style"""
397 ui.write(('color mode: %s\n') % ui._colormode)
397 ui.write(('color mode: %s\n') % ui._colormode)
398 if opts.get(r'style'):
398 if opts.get(r'style'):
399 return _debugdisplaystyle(ui)
399 return _debugdisplaystyle(ui)
400 else:
400 else:
401 return _debugdisplaycolor(ui)
401 return _debugdisplaycolor(ui)
402
402
403 def _debugdisplaycolor(ui):
403 def _debugdisplaycolor(ui):
404 ui = ui.copy()
404 ui = ui.copy()
405 ui._styles.clear()
405 ui._styles.clear()
406 for effect in color._activeeffects(ui).keys():
406 for effect in color._activeeffects(ui).keys():
407 ui._styles[effect] = effect
407 ui._styles[effect] = effect
408 if ui._terminfoparams:
408 if ui._terminfoparams:
409 for k, v in ui.configitems('color'):
409 for k, v in ui.configitems('color'):
410 if k.startswith('color.'):
410 if k.startswith('color.'):
411 ui._styles[k] = k[6:]
411 ui._styles[k] = k[6:]
412 elif k.startswith('terminfo.'):
412 elif k.startswith('terminfo.'):
413 ui._styles[k] = k[9:]
413 ui._styles[k] = k[9:]
414 ui.write(_('available colors:\n'))
414 ui.write(_('available colors:\n'))
415 # sort label with a '_' after the other to group '_background' entry.
415 # sort label with a '_' after the other to group '_background' entry.
416 items = sorted(ui._styles.items(),
416 items = sorted(ui._styles.items(),
417 key=lambda i: ('_' in i[0], i[0], i[1]))
417 key=lambda i: ('_' in i[0], i[0], i[1]))
418 for colorname, label in items:
418 for colorname, label in items:
419 ui.write(('%s\n') % colorname, label=label)
419 ui.write(('%s\n') % colorname, label=label)
420
420
421 def _debugdisplaystyle(ui):
421 def _debugdisplaystyle(ui):
422 ui.write(_('available style:\n'))
422 ui.write(_('available style:\n'))
423 width = max(len(s) for s in ui._styles)
423 width = max(len(s) for s in ui._styles)
424 for label, effects in sorted(ui._styles.items()):
424 for label, effects in sorted(ui._styles.items()):
425 ui.write('%s' % label, label=label)
425 ui.write('%s' % label, label=label)
426 if effects:
426 if effects:
427 # 50
427 # 50
428 ui.write(': ')
428 ui.write(': ')
429 ui.write(' ' * (max(0, width - len(label))))
429 ui.write(' ' * (max(0, width - len(label))))
430 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
430 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
431 ui.write('\n')
431 ui.write('\n')
432
432
433 @command('debugcreatestreamclonebundle', [], 'FILE')
433 @command('debugcreatestreamclonebundle', [], 'FILE')
434 def debugcreatestreamclonebundle(ui, repo, fname):
434 def debugcreatestreamclonebundle(ui, repo, fname):
435 """create a stream clone bundle file
435 """create a stream clone bundle file
436
436
437 Stream bundles are special bundles that are essentially archives of
437 Stream bundles are special bundles that are essentially archives of
438 revlog files. They are commonly used for cloning very quickly.
438 revlog files. They are commonly used for cloning very quickly.
439 """
439 """
440 # TODO we may want to turn this into an abort when this functionality
440 # TODO we may want to turn this into an abort when this functionality
441 # is moved into `hg bundle`.
441 # is moved into `hg bundle`.
442 if phases.hassecret(repo):
442 if phases.hassecret(repo):
443 ui.warn(_('(warning: stream clone bundle will contain secret '
443 ui.warn(_('(warning: stream clone bundle will contain secret '
444 'revisions)\n'))
444 'revisions)\n'))
445
445
446 requirements, gen = streamclone.generatebundlev1(repo)
446 requirements, gen = streamclone.generatebundlev1(repo)
447 changegroup.writechunks(ui, gen, fname)
447 changegroup.writechunks(ui, gen, fname)
448
448
449 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
449 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
450
450
451 @command('debugdag',
451 @command('debugdag',
452 [('t', 'tags', None, _('use tags as labels')),
452 [('t', 'tags', None, _('use tags as labels')),
453 ('b', 'branches', None, _('annotate with branch names')),
453 ('b', 'branches', None, _('annotate with branch names')),
454 ('', 'dots', None, _('use dots for runs')),
454 ('', 'dots', None, _('use dots for runs')),
455 ('s', 'spaces', None, _('separate elements by spaces'))],
455 ('s', 'spaces', None, _('separate elements by spaces'))],
456 _('[OPTION]... [FILE [REV]...]'),
456 _('[OPTION]... [FILE [REV]...]'),
457 optionalrepo=True)
457 optionalrepo=True)
458 def debugdag(ui, repo, file_=None, *revs, **opts):
458 def debugdag(ui, repo, file_=None, *revs, **opts):
459 """format the changelog or an index DAG as a concise textual description
459 """format the changelog or an index DAG as a concise textual description
460
460
461 If you pass a revlog index, the revlog's DAG is emitted. If you list
461 If you pass a revlog index, the revlog's DAG is emitted. If you list
462 revision numbers, they get labeled in the output as rN.
462 revision numbers, they get labeled in the output as rN.
463
463
464 Otherwise, the changelog DAG of the current repo is emitted.
464 Otherwise, the changelog DAG of the current repo is emitted.
465 """
465 """
466 spaces = opts.get(r'spaces')
466 spaces = opts.get(r'spaces')
467 dots = opts.get(r'dots')
467 dots = opts.get(r'dots')
468 if file_:
468 if file_:
469 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
469 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
470 file_)
470 file_)
471 revs = set((int(r) for r in revs))
471 revs = set((int(r) for r in revs))
472 def events():
472 def events():
473 for r in rlog:
473 for r in rlog:
474 yield 'n', (r, list(p for p in rlog.parentrevs(r)
474 yield 'n', (r, list(p for p in rlog.parentrevs(r)
475 if p != -1))
475 if p != -1))
476 if r in revs:
476 if r in revs:
477 yield 'l', (r, "r%i" % r)
477 yield 'l', (r, "r%i" % r)
478 elif repo:
478 elif repo:
479 cl = repo.changelog
479 cl = repo.changelog
480 tags = opts.get(r'tags')
480 tags = opts.get(r'tags')
481 branches = opts.get(r'branches')
481 branches = opts.get(r'branches')
482 if tags:
482 if tags:
483 labels = {}
483 labels = {}
484 for l, n in repo.tags().items():
484 for l, n in repo.tags().items():
485 labels.setdefault(cl.rev(n), []).append(l)
485 labels.setdefault(cl.rev(n), []).append(l)
486 def events():
486 def events():
487 b = "default"
487 b = "default"
488 for r in cl:
488 for r in cl:
489 if branches:
489 if branches:
490 newb = cl.read(cl.node(r))[5]['branch']
490 newb = cl.read(cl.node(r))[5]['branch']
491 if newb != b:
491 if newb != b:
492 yield 'a', newb
492 yield 'a', newb
493 b = newb
493 b = newb
494 yield 'n', (r, list(p for p in cl.parentrevs(r)
494 yield 'n', (r, list(p for p in cl.parentrevs(r)
495 if p != -1))
495 if p != -1))
496 if tags:
496 if tags:
497 ls = labels.get(r)
497 ls = labels.get(r)
498 if ls:
498 if ls:
499 for l in ls:
499 for l in ls:
500 yield 'l', (r, l)
500 yield 'l', (r, l)
501 else:
501 else:
502 raise error.Abort(_('need repo for changelog dag'))
502 raise error.Abort(_('need repo for changelog dag'))
503
503
504 for line in dagparser.dagtextlines(events(),
504 for line in dagparser.dagtextlines(events(),
505 addspaces=spaces,
505 addspaces=spaces,
506 wraplabels=True,
506 wraplabels=True,
507 wrapannotations=True,
507 wrapannotations=True,
508 wrapnonlinear=dots,
508 wrapnonlinear=dots,
509 usedots=dots,
509 usedots=dots,
510 maxlinewidth=70):
510 maxlinewidth=70):
511 ui.write(line)
511 ui.write(line)
512 ui.write("\n")
512 ui.write("\n")
513
513
514 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
514 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
515 def debugdata(ui, repo, file_, rev=None, **opts):
515 def debugdata(ui, repo, file_, rev=None, **opts):
516 """dump the contents of a data file revision"""
516 """dump the contents of a data file revision"""
517 opts = pycompat.byteskwargs(opts)
517 opts = pycompat.byteskwargs(opts)
518 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
518 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
519 if rev is not None:
519 if rev is not None:
520 raise error.CommandError('debugdata', _('invalid arguments'))
520 raise error.CommandError('debugdata', _('invalid arguments'))
521 file_, rev = None, file_
521 file_, rev = None, file_
522 elif rev is None:
522 elif rev is None:
523 raise error.CommandError('debugdata', _('invalid arguments'))
523 raise error.CommandError('debugdata', _('invalid arguments'))
524 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
524 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
525 try:
525 try:
526 ui.write(r.revision(r.lookup(rev), raw=True))
526 ui.write(r.revision(r.lookup(rev), raw=True))
527 except KeyError:
527 except KeyError:
528 raise error.Abort(_('invalid revision identifier %s') % rev)
528 raise error.Abort(_('invalid revision identifier %s') % rev)
529
529
530 @command('debugdate',
530 @command('debugdate',
531 [('e', 'extended', None, _('try extended date formats'))],
531 [('e', 'extended', None, _('try extended date formats'))],
532 _('[-e] DATE [RANGE]'),
532 _('[-e] DATE [RANGE]'),
533 norepo=True, optionalrepo=True)
533 norepo=True, optionalrepo=True)
534 def debugdate(ui, date, range=None, **opts):
534 def debugdate(ui, date, range=None, **opts):
535 """parse and display a date"""
535 """parse and display a date"""
536 if opts[r"extended"]:
536 if opts[r"extended"]:
537 d = util.parsedate(date, util.extendeddateformats)
537 d = util.parsedate(date, util.extendeddateformats)
538 else:
538 else:
539 d = util.parsedate(date)
539 d = util.parsedate(date)
540 ui.write(("internal: %s %s\n") % d)
540 ui.write(("internal: %s %s\n") % d)
541 ui.write(("standard: %s\n") % util.datestr(d))
541 ui.write(("standard: %s\n") % util.datestr(d))
542 if range:
542 if range:
543 m = util.matchdate(range)
543 m = util.matchdate(range)
544 ui.write(("match: %s\n") % m(d[0]))
544 ui.write(("match: %s\n") % m(d[0]))
545
545
546 @command('debugdeltachain',
546 @command('debugdeltachain',
547 cmdutil.debugrevlogopts + cmdutil.formatteropts,
547 cmdutil.debugrevlogopts + cmdutil.formatteropts,
548 _('-c|-m|FILE'),
548 _('-c|-m|FILE'),
549 optionalrepo=True)
549 optionalrepo=True)
550 def debugdeltachain(ui, repo, file_=None, **opts):
550 def debugdeltachain(ui, repo, file_=None, **opts):
551 """dump information about delta chains in a revlog
551 """dump information about delta chains in a revlog
552
552
553 Output can be templatized. Available template keywords are:
553 Output can be templatized. Available template keywords are:
554
554
555 :``rev``: revision number
555 :``rev``: revision number
556 :``chainid``: delta chain identifier (numbered by unique base)
556 :``chainid``: delta chain identifier (numbered by unique base)
557 :``chainlen``: delta chain length to this revision
557 :``chainlen``: delta chain length to this revision
558 :``prevrev``: previous revision in delta chain
558 :``prevrev``: previous revision in delta chain
559 :``deltatype``: role of delta / how it was computed
559 :``deltatype``: role of delta / how it was computed
560 :``compsize``: compressed size of revision
560 :``compsize``: compressed size of revision
561 :``uncompsize``: uncompressed size of revision
561 :``uncompsize``: uncompressed size of revision
562 :``chainsize``: total size of compressed revisions in chain
562 :``chainsize``: total size of compressed revisions in chain
563 :``chainratio``: total chain size divided by uncompressed revision size
563 :``chainratio``: total chain size divided by uncompressed revision size
564 (new delta chains typically start at ratio 2.00)
564 (new delta chains typically start at ratio 2.00)
565 :``lindist``: linear distance from base revision in delta chain to end
565 :``lindist``: linear distance from base revision in delta chain to end
566 of this revision
566 of this revision
567 :``extradist``: total size of revisions not part of this delta chain from
567 :``extradist``: total size of revisions not part of this delta chain from
568 base of delta chain to end of this revision; a measurement
568 base of delta chain to end of this revision; a measurement
569 of how much extra data we need to read/seek across to read
569 of how much extra data we need to read/seek across to read
570 the delta chain for this revision
570 the delta chain for this revision
571 :``extraratio``: extradist divided by chainsize; another representation of
571 :``extraratio``: extradist divided by chainsize; another representation of
572 how much unrelated data is needed to load this delta chain
572 how much unrelated data is needed to load this delta chain
573 """
573 """
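    # How the template keywords above relate (mirrors the computation in the
    # loop below):
    #   lindist    = end offset of this rev - start offset of the chain base
    #   extradist  = lindist - chainsize
    #   chainratio = chainsize / uncompsize
    #   extraratio = extradist / chainsize
    # Illustrative numbers only (not real output): chainsize=1000,
    # uncompsize=800, lindist=1400  ->  chainratio=1.25, extradist=400,
    # extraratio=0.40.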
574 opts = pycompat.byteskwargs(opts)
574 opts = pycompat.byteskwargs(opts)
575 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
575 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
576 index = r.index
576 index = r.index
577 generaldelta = r.version & revlog.FLAG_GENERALDELTA
577 generaldelta = r.version & revlog.FLAG_GENERALDELTA
578
578
579 def revinfo(rev):
579 def revinfo(rev):
580 e = index[rev]
580 e = index[rev]
581 compsize = e[1]
581 compsize = e[1]
582 uncompsize = e[2]
582 uncompsize = e[2]
583 chainsize = 0
583 chainsize = 0
584
584
585 if generaldelta:
585 if generaldelta:
586 if e[3] == e[5]:
586 if e[3] == e[5]:
587 deltatype = 'p1'
587 deltatype = 'p1'
588 elif e[3] == e[6]:
588 elif e[3] == e[6]:
589 deltatype = 'p2'
589 deltatype = 'p2'
590 elif e[3] == rev - 1:
590 elif e[3] == rev - 1:
591 deltatype = 'prev'
591 deltatype = 'prev'
592 elif e[3] == rev:
592 elif e[3] == rev:
593 deltatype = 'base'
593 deltatype = 'base'
594 else:
594 else:
595 deltatype = 'other'
595 deltatype = 'other'
596 else:
596 else:
597 if e[3] == rev:
597 if e[3] == rev:
598 deltatype = 'base'
598 deltatype = 'base'
599 else:
599 else:
600 deltatype = 'prev'
600 deltatype = 'prev'
601
601
602 chain = r._deltachain(rev)[0]
602 chain = r._deltachain(rev)[0]
603 for iterrev in chain:
603 for iterrev in chain:
604 e = index[iterrev]
604 e = index[iterrev]
605 chainsize += e[1]
605 chainsize += e[1]
606
606
607 return compsize, uncompsize, deltatype, chain, chainsize
607 return compsize, uncompsize, deltatype, chain, chainsize
608
608
609 fm = ui.formatter('debugdeltachain', opts)
609 fm = ui.formatter('debugdeltachain', opts)
610
610
611 fm.plain(' rev chain# chainlen prev delta '
611 fm.plain(' rev chain# chainlen prev delta '
612 'size rawsize chainsize ratio lindist extradist '
612 'size rawsize chainsize ratio lindist extradist '
613 'extraratio\n')
613 'extraratio\n')
614
614
615 chainbases = {}
615 chainbases = {}
616 for rev in r:
616 for rev in r:
617 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
617 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
618 chainbase = chain[0]
618 chainbase = chain[0]
619 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
619 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
620 basestart = r.start(chainbase)
620 basestart = r.start(chainbase)
621 revstart = r.start(rev)
621 revstart = r.start(rev)
622 lineardist = revstart + comp - basestart
622 lineardist = revstart + comp - basestart
623 extradist = lineardist - chainsize
623 extradist = lineardist - chainsize
624 try:
624 try:
625 prevrev = chain[-2]
625 prevrev = chain[-2]
626 except IndexError:
626 except IndexError:
627 prevrev = -1
627 prevrev = -1
628
628
629 chainratio = float(chainsize) / float(uncomp)
629 chainratio = float(chainsize) / float(uncomp)
630 extraratio = float(extradist) / float(chainsize)
630 extraratio = float(extradist) / float(chainsize)
631
631
632 fm.startitem()
632 fm.startitem()
633 fm.write('rev chainid chainlen prevrev deltatype compsize '
633 fm.write('rev chainid chainlen prevrev deltatype compsize '
634 'uncompsize chainsize chainratio lindist extradist '
634 'uncompsize chainsize chainratio lindist extradist '
635 'extraratio',
635 'extraratio',
636 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
636 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
637 rev, chainid, len(chain), prevrev, deltatype, comp,
637 rev, chainid, len(chain), prevrev, deltatype, comp,
638 uncomp, chainsize, chainratio, lineardist, extradist,
638 uncomp, chainsize, chainratio, lineardist, extradist,
639 extraratio,
639 extraratio,
640 rev=rev, chainid=chainid, chainlen=len(chain),
640 rev=rev, chainid=chainid, chainlen=len(chain),
641 prevrev=prevrev, deltatype=deltatype, compsize=comp,
641 prevrev=prevrev, deltatype=deltatype, compsize=comp,
642 uncompsize=uncomp, chainsize=chainsize,
642 uncompsize=uncomp, chainsize=chainsize,
643 chainratio=chainratio, lindist=lineardist,
643 chainratio=chainratio, lindist=lineardist,
644 extradist=extradist, extraratio=extraratio)
644 extradist=extradist, extraratio=extraratio)
645
645
646 fm.end()
646 fm.end()
647
647
648 @command('debugdirstate|debugstate',
648 @command('debugdirstate|debugstate',
649 [('', 'nodates', None, _('do not display the saved mtime')),
649 [('', 'nodates', None, _('do not display the saved mtime')),
650 ('', 'datesort', None, _('sort by saved mtime'))],
650 ('', 'datesort', None, _('sort by saved mtime'))],
651 _('[OPTION]...'))
651 _('[OPTION]...'))
652 def debugstate(ui, repo, **opts):
652 def debugstate(ui, repo, **opts):
653 """show the contents of the current dirstate"""
653 """show the contents of the current dirstate"""
654
654
655 nodates = opts.get(r'nodates')
655 nodates = opts.get(r'nodates')
656 datesort = opts.get(r'datesort')
656 datesort = opts.get(r'datesort')
657
657
658 timestr = ""
658 timestr = ""
659 if datesort:
659 if datesort:
660 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
660 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
661 else:
661 else:
662 keyfunc = None # sort by filename
662 keyfunc = None # sort by filename
663 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
663 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
664 if ent[3] == -1:
664 if ent[3] == -1:
665 timestr = 'unset '
665 timestr = 'unset '
666 elif nodates:
666 elif nodates:
667 timestr = 'set '
667 timestr = 'set '
668 else:
668 else:
669 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
669 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
670 time.localtime(ent[3]))
670 time.localtime(ent[3]))
671 if ent[1] & 0o20000:
671 if ent[1] & 0o20000:
672 mode = 'lnk'
672 mode = 'lnk'
673 else:
673 else:
674 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
674 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
675 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
675 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
676 for f in repo.dirstate.copies():
676 for f in repo.dirstate.copies():
677 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
677 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
678
678
679 @command('debugdiscovery',
679 @command('debugdiscovery',
680 [('', 'old', None, _('use old-style discovery')),
680 [('', 'old', None, _('use old-style discovery')),
681 ('', 'nonheads', None,
681 ('', 'nonheads', None,
682 _('use old-style discovery with non-heads included')),
682 _('use old-style discovery with non-heads included')),
683 ] + cmdutil.remoteopts,
683 ] + cmdutil.remoteopts,
684 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
684 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
685 def debugdiscovery(ui, repo, remoteurl="default", **opts):
685 def debugdiscovery(ui, repo, remoteurl="default", **opts):
686 """runs the changeset discovery protocol in isolation"""
686 """runs the changeset discovery protocol in isolation"""
687 opts = pycompat.byteskwargs(opts)
687 opts = pycompat.byteskwargs(opts)
688 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
688 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
689 opts.get('branch'))
689 opts.get('branch'))
690 remote = hg.peer(repo, opts, remoteurl)
690 remote = hg.peer(repo, opts, remoteurl)
691 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
691 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
692
692
693 # make sure tests are repeatable
693 # make sure tests are repeatable
694 random.seed(12323)
694 random.seed(12323)
695
695
696 def doit(localheads, remoteheads, remote=remote):
696 def doit(localheads, remoteheads, remote=remote):
697 if opts.get('old'):
697 if opts.get('old'):
698 if localheads:
698 if localheads:
699 raise error.Abort('cannot use localheads with old style '
699 raise error.Abort('cannot use localheads with old style '
700 'discovery')
700 'discovery')
701 if not util.safehasattr(remote, 'branches'):
701 if not util.safehasattr(remote, 'branches'):
702 # enable in-client legacy support
702 # enable in-client legacy support
703 remote = localrepo.locallegacypeer(remote.local())
703 remote = localrepo.locallegacypeer(remote.local())
704 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
704 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
705 force=True)
705 force=True)
706 common = set(common)
706 common = set(common)
707 if not opts.get('nonheads'):
707 if not opts.get('nonheads'):
708 ui.write(("unpruned common: %s\n") %
708 ui.write(("unpruned common: %s\n") %
709 " ".join(sorted(short(n) for n in common)))
709 " ".join(sorted(short(n) for n in common)))
710 dag = dagutil.revlogdag(repo.changelog)
710 dag = dagutil.revlogdag(repo.changelog)
711 all = dag.ancestorset(dag.internalizeall(common))
711 all = dag.ancestorset(dag.internalizeall(common))
712 common = dag.externalizeall(dag.headsetofconnecteds(all))
712 common = dag.externalizeall(dag.headsetofconnecteds(all))
713 else:
713 else:
714 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
714 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
715 common = set(common)
715 common = set(common)
716 rheads = set(hds)
716 rheads = set(hds)
717 lheads = set(repo.heads())
717 lheads = set(repo.heads())
718 ui.write(("common heads: %s\n") %
718 ui.write(("common heads: %s\n") %
719 " ".join(sorted(short(n) for n in common)))
719 " ".join(sorted(short(n) for n in common)))
720 if lheads <= common:
720 if lheads <= common:
721 ui.write(("local is subset\n"))
721 ui.write(("local is subset\n"))
722 elif rheads <= common:
722 elif rheads <= common:
723 ui.write(("remote is subset\n"))
723 ui.write(("remote is subset\n"))
724
724
725 serverlogs = opts.get('serverlog')
725 serverlogs = opts.get('serverlog')
726 if serverlogs:
726 if serverlogs:
727 for filename in serverlogs:
727 for filename in serverlogs:
728 with open(filename, 'r') as logfile:
728 with open(filename, 'r') as logfile:
729 line = logfile.readline()
729 line = logfile.readline()
730 while line:
730 while line:
731 parts = line.strip().split(';')
731 parts = line.strip().split(';')
732 op = parts[1]
732 op = parts[1]
733 if op == 'cg':
733 if op == 'cg':
734 pass
734 pass
735 elif op == 'cgss':
735 elif op == 'cgss':
736 doit(parts[2].split(' '), parts[3].split(' '))
736 doit(parts[2].split(' '), parts[3].split(' '))
737 elif op == 'unb':
737 elif op == 'unb':
738 doit(parts[3].split(' '), parts[2].split(' '))
738 doit(parts[3].split(' '), parts[2].split(' '))
739 line = logfile.readline()
739 line = logfile.readline()
740 else:
740 else:
741 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
741 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
742 opts.get('remote_head'))
742 opts.get('remote_head'))
743 localrevs = opts.get('local_head')
743 localrevs = opts.get('local_head')
744 doit(localrevs, remoterevs)
744 doit(localrevs, remoterevs)
745
745
746 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
746 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
747 def debugextensions(ui, **opts):
747 def debugextensions(ui, **opts):
748 '''show information about active extensions'''
748 '''show information about active extensions'''
749 opts = pycompat.byteskwargs(opts)
749 opts = pycompat.byteskwargs(opts)
750 exts = extensions.extensions(ui)
750 exts = extensions.extensions(ui)
751 hgver = util.version()
751 hgver = util.version()
752 fm = ui.formatter('debugextensions', opts)
752 fm = ui.formatter('debugextensions', opts)
753 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
753 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
754 isinternal = extensions.ismoduleinternal(extmod)
754 isinternal = extensions.ismoduleinternal(extmod)
755 extsource = pycompat.fsencode(extmod.__file__)
755 extsource = pycompat.fsencode(extmod.__file__)
756 if isinternal:
756 if isinternal:
757 exttestedwith = [] # never expose magic string to users
757 exttestedwith = [] # never expose magic string to users
758 else:
758 else:
759 exttestedwith = getattr(extmod, 'testedwith', '').split()
759 exttestedwith = getattr(extmod, 'testedwith', '').split()
760 extbuglink = getattr(extmod, 'buglink', None)
760 extbuglink = getattr(extmod, 'buglink', None)
761
761
762 fm.startitem()
762 fm.startitem()
763
763
764 if ui.quiet or ui.verbose:
764 if ui.quiet or ui.verbose:
765 fm.write('name', '%s\n', extname)
765 fm.write('name', '%s\n', extname)
766 else:
766 else:
767 fm.write('name', '%s', extname)
767 fm.write('name', '%s', extname)
768 if isinternal or hgver in exttestedwith:
768 if isinternal or hgver in exttestedwith:
769 fm.plain('\n')
769 fm.plain('\n')
770 elif not exttestedwith:
770 elif not exttestedwith:
771 fm.plain(_(' (untested!)\n'))
771 fm.plain(_(' (untested!)\n'))
772 else:
772 else:
773 lasttestedversion = exttestedwith[-1]
773 lasttestedversion = exttestedwith[-1]
774 fm.plain(' (%s!)\n' % lasttestedversion)
774 fm.plain(' (%s!)\n' % lasttestedversion)
775
775
776 fm.condwrite(ui.verbose and extsource, 'source',
776 fm.condwrite(ui.verbose and extsource, 'source',
777 _(' location: %s\n'), extsource or "")
777 _(' location: %s\n'), extsource or "")
778
778
779 if ui.verbose:
779 if ui.verbose:
780 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
780 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
781 fm.data(bundled=isinternal)
781 fm.data(bundled=isinternal)
782
782
783 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
783 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
784 _(' tested with: %s\n'),
784 _(' tested with: %s\n'),
785 fm.formatlist(exttestedwith, name='ver'))
785 fm.formatlist(exttestedwith, name='ver'))
786
786
787 fm.condwrite(ui.verbose and extbuglink, 'buglink',
787 fm.condwrite(ui.verbose and extbuglink, 'buglink',
788 _(' bug reporting: %s\n'), extbuglink or "")
788 _(' bug reporting: %s\n'), extbuglink or "")
789
789
790 fm.end()
790 fm.end()
791
791
792 @command('debugfileset',
792 @command('debugfileset',
793 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
793 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
794 _('[-r REV] FILESPEC'))
794 _('[-r REV] FILESPEC'))
795 def debugfileset(ui, repo, expr, **opts):
795 def debugfileset(ui, repo, expr, **opts):
796 '''parse and apply a fileset specification'''
796 '''parse and apply a fileset specification'''
797 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
797 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
798 if ui.verbose:
798 if ui.verbose:
799 tree = fileset.parse(expr)
799 tree = fileset.parse(expr)
800 ui.note(fileset.prettyformat(tree), "\n")
800 ui.note(fileset.prettyformat(tree), "\n")
801
801
802 for f in ctx.getfileset(expr):
802 for f in ctx.getfileset(expr):
803 ui.write("%s\n" % f)
803 ui.write("%s\n" % f)
804
804
805 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
805 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
806 def debugfsinfo(ui, path="."):
806 def debugfsinfo(ui, path="."):
807 """show information detected about current filesystem"""
807 """show information detected about current filesystem"""
808 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
808 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
809 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
809 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
810 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
810 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
811 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
811 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
812 casesensitive = '(unknown)'
812 casesensitive = '(unknown)'
813 try:
813 try:
814 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
814 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
815 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
815 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
816 except OSError:
816 except OSError:
817 pass
817 pass
818 ui.write(('case-sensitive: %s\n') % casesensitive)
818 ui.write(('case-sensitive: %s\n') % casesensitive)
819
819
820 @command('debuggetbundle',
820 @command('debuggetbundle',
821 [('H', 'head', [], _('id of head node'), _('ID')),
821 [('H', 'head', [], _('id of head node'), _('ID')),
822 ('C', 'common', [], _('id of common node'), _('ID')),
822 ('C', 'common', [], _('id of common node'), _('ID')),
823 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
823 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
824 _('REPO FILE [-H|-C ID]...'),
824 _('REPO FILE [-H|-C ID]...'),
825 norepo=True)
825 norepo=True)
826 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
826 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
827 """retrieves a bundle from a repo
827 """retrieves a bundle from a repo
828
828
829 Every ID must be a full-length hex node id string. Saves the bundle to the
829 Every ID must be a full-length hex node id string. Saves the bundle to the
830 given file.
830 given file.
831 """
831 """
832 opts = pycompat.byteskwargs(opts)
832 opts = pycompat.byteskwargs(opts)
833 repo = hg.peer(ui, opts, repopath)
833 repo = hg.peer(ui, opts, repopath)
834 if not repo.capable('getbundle'):
834 if not repo.capable('getbundle'):
835 raise error.Abort("getbundle() not supported by target repository")
835 raise error.Abort("getbundle() not supported by target repository")
836 args = {}
836 args = {}
837 if common:
837 if common:
838 args[r'common'] = [bin(s) for s in common]
838 args[r'common'] = [bin(s) for s in common]
839 if head:
839 if head:
840 args[r'heads'] = [bin(s) for s in head]
840 args[r'heads'] = [bin(s) for s in head]
841 # TODO: get desired bundlecaps from command line.
841 # TODO: get desired bundlecaps from command line.
842 args[r'bundlecaps'] = None
842 args[r'bundlecaps'] = None
843 bundle = repo.getbundle('debug', **args)
843 bundle = repo.getbundle('debug', **args)
844
844
845 bundletype = opts.get('type', 'bzip2').lower()
845 bundletype = opts.get('type', 'bzip2').lower()
846 btypes = {'none': 'HG10UN',
846 btypes = {'none': 'HG10UN',
847 'bzip2': 'HG10BZ',
847 'bzip2': 'HG10BZ',
848 'gzip': 'HG10GZ',
848 'gzip': 'HG10GZ',
849 'bundle2': 'HG20'}
849 'bundle2': 'HG20'}
850 bundletype = btypes.get(bundletype)
850 bundletype = btypes.get(bundletype)
851 if bundletype not in bundle2.bundletypes:
851 if bundletype not in bundle2.bundletypes:
852 raise error.Abort(_('unknown bundle type specified with --type'))
852 raise error.Abort(_('unknown bundle type specified with --type'))
853 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
853 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
854
854
855 @command('debugignore', [], '[FILE]')
855 @command('debugignore', [], '[FILE]')
856 def debugignore(ui, repo, *files, **opts):
856 def debugignore(ui, repo, *files, **opts):
857 """display the combined ignore pattern and information about ignored files
857 """display the combined ignore pattern and information about ignored files
858
858
859 With no argument display the combined ignore pattern.
859 With no argument display the combined ignore pattern.
860
860
861 Given space separated file names, shows if the given file is ignored and
861 Given space separated file names, shows if the given file is ignored and
862 if so, show the ignore rule (file and line number) that matched it.
862 if so, show the ignore rule (file and line number) that matched it.
863 """
863 """
864 ignore = repo.dirstate._ignore
864 ignore = repo.dirstate._ignore
865 if not files:
865 if not files:
866 # Show all the patterns
866 # Show all the patterns
867 ui.write("%s\n" % repr(ignore))
867 ui.write("%s\n" % repr(ignore))
868 else:
868 else:
869 for f in files:
869 m = scmutil.match(repo[None], pats=files)
870 for f in m.files():
870 nf = util.normpath(f)
871 nf = util.normpath(f)
871 ignored = None
872 ignored = None
872 ignoredata = None
873 ignoredata = None
873 if nf != '.':
874 if nf != '.':
874 if ignore(nf):
875 if ignore(nf):
875 ignored = nf
876 ignored = nf
876 ignoredata = repo.dirstate._ignorefileandline(nf)
877 ignoredata = repo.dirstate._ignorefileandline(nf)
877 else:
878 else:
878 for p in util.finddirs(nf):
879 for p in util.finddirs(nf):
879 if ignore(p):
880 if ignore(p):
880 ignored = p
881 ignored = p
881 ignoredata = repo.dirstate._ignorefileandline(p)
882 ignoredata = repo.dirstate._ignorefileandline(p)
882 break
883 break
883 if ignored:
884 if ignored:
884 if ignored == nf:
885 if ignored == nf:
885 ui.write(_("%s is ignored\n") % f)
886 ui.write(_("%s is ignored\n") % m.uipath(f))
886 else:
887 else:
887 ui.write(_("%s is ignored because of "
888 ui.write(_("%s is ignored because of "
888 "containing folder %s\n")
889 "containing folder %s\n")
889 % (f, ignored))
890 % (m.uipath(f), ignored))
890 ignorefile, lineno, line = ignoredata
891 ignorefile, lineno, line = ignoredata
891 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
892 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
892 % (ignorefile, lineno, line))
893 % (ignorefile, lineno, line))
893 else:
894 else:
894 ui.write(_("%s is not ignored\n") % f)
895 ui.write(_("%s is not ignored\n") % m.uipath(f))
895
896
896 @command('debugindex', cmdutil.debugrevlogopts +
897 @command('debugindex', cmdutil.debugrevlogopts +
897 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
898 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
898 _('[-f FORMAT] -c|-m|FILE'),
899 _('[-f FORMAT] -c|-m|FILE'),
899 optionalrepo=True)
900 optionalrepo=True)
900 def debugindex(ui, repo, file_=None, **opts):
901 def debugindex(ui, repo, file_=None, **opts):
901 """dump the contents of an index file"""
902 """dump the contents of an index file"""
902 opts = pycompat.byteskwargs(opts)
903 opts = pycompat.byteskwargs(opts)
903 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
904 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
904 format = opts.get('format', 0)
905 format = opts.get('format', 0)
905 if format not in (0, 1):
906 if format not in (0, 1):
906 raise error.Abort(_("unknown format %d") % format)
907 raise error.Abort(_("unknown format %d") % format)
907
908
908 generaldelta = r.version & revlog.FLAG_GENERALDELTA
909 generaldelta = r.version & revlog.FLAG_GENERALDELTA
909 if generaldelta:
910 if generaldelta:
910 basehdr = ' delta'
911 basehdr = ' delta'
911 else:
912 else:
912 basehdr = ' base'
913 basehdr = ' base'
913
914
914 if ui.debugflag:
915 if ui.debugflag:
915 shortfn = hex
916 shortfn = hex
916 else:
917 else:
917 shortfn = short
918 shortfn = short
918
919
919 # There might not be anything in r, so have a sane default
920 # There might not be anything in r, so have a sane default
920 idlen = 12
921 idlen = 12
921 for i in r:
922 for i in r:
922 idlen = len(shortfn(r.node(i)))
923 idlen = len(shortfn(r.node(i)))
923 break
924 break
924
925
925 if format == 0:
926 if format == 0:
926 ui.write((" rev offset length " + basehdr + " linkrev"
927 ui.write((" rev offset length " + basehdr + " linkrev"
927 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
928 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
928 elif format == 1:
929 elif format == 1:
929 ui.write((" rev flag offset length"
930 ui.write((" rev flag offset length"
930 " size " + basehdr + " link p1 p2"
931 " size " + basehdr + " link p1 p2"
931 " %s\n") % "nodeid".rjust(idlen))
932 " %s\n") % "nodeid".rjust(idlen))
932
933
933 for i in r:
934 for i in r:
934 node = r.node(i)
935 node = r.node(i)
935 if generaldelta:
936 if generaldelta:
936 base = r.deltaparent(i)
937 base = r.deltaparent(i)
937 else:
938 else:
938 base = r.chainbase(i)
939 base = r.chainbase(i)
939 if format == 0:
940 if format == 0:
940 try:
941 try:
941 pp = r.parents(node)
942 pp = r.parents(node)
942 except Exception:
943 except Exception:
943 pp = [nullid, nullid]
944 pp = [nullid, nullid]
944 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
945 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
945 i, r.start(i), r.length(i), base, r.linkrev(i),
946 i, r.start(i), r.length(i), base, r.linkrev(i),
946 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
947 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
947 elif format == 1:
948 elif format == 1:
948 pr = r.parentrevs(i)
949 pr = r.parentrevs(i)
949 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
950 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
950 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
951 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
951 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
952 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
952
953
953 @command('debugindexdot', cmdutil.debugrevlogopts,
954 @command('debugindexdot', cmdutil.debugrevlogopts,
954 _('-c|-m|FILE'), optionalrepo=True)
955 _('-c|-m|FILE'), optionalrepo=True)
955 def debugindexdot(ui, repo, file_=None, **opts):
956 def debugindexdot(ui, repo, file_=None, **opts):
956 """dump an index DAG as a graphviz dot file"""
957 """dump an index DAG as a graphviz dot file"""
957 opts = pycompat.byteskwargs(opts)
958 opts = pycompat.byteskwargs(opts)
958 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
959 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
959 ui.write(("digraph G {\n"))
960 ui.write(("digraph G {\n"))
960 for i in r:
961 for i in r:
961 node = r.node(i)
962 node = r.node(i)
962 pp = r.parents(node)
963 pp = r.parents(node)
963 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
964 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
964 if pp[1] != nullid:
965 if pp[1] != nullid:
965 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
966 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
966 ui.write("}\n")
967 ui.write("}\n")
967
968
968 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
969 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
969 def debuginstall(ui, **opts):
970 def debuginstall(ui, **opts):
970 '''test Mercurial installation
971 '''test Mercurial installation
971
972
972 Returns 0 on success.
973 Returns 0 on success.
973 '''
974 '''
974 opts = pycompat.byteskwargs(opts)
975 opts = pycompat.byteskwargs(opts)
975
976
976 def writetemp(contents):
977 def writetemp(contents):
977 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
978 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
978 f = os.fdopen(fd, pycompat.sysstr("wb"))
979 f = os.fdopen(fd, pycompat.sysstr("wb"))
979 f.write(contents)
980 f.write(contents)
980 f.close()
981 f.close()
981 return name
982 return name
982
983
983 problems = 0
984 problems = 0
984
985
985 fm = ui.formatter('debuginstall', opts)
986 fm = ui.formatter('debuginstall', opts)
986 fm.startitem()
987 fm.startitem()
987
988
988 # encoding
989 # encoding
989 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
990 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
990 err = None
991 err = None
991 try:
992 try:
992 encoding.fromlocal("test")
993 encoding.fromlocal("test")
993 except error.Abort as inst:
994 except error.Abort as inst:
994 err = inst
995 err = inst
995 problems += 1
996 problems += 1
996 fm.condwrite(err, 'encodingerror', _(" %s\n"
997 fm.condwrite(err, 'encodingerror', _(" %s\n"
997 " (check that your locale is properly set)\n"), err)
998 " (check that your locale is properly set)\n"), err)
998
999
999 # Python
1000 # Python
1000 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1001 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1001 pycompat.sysexecutable)
1002 pycompat.sysexecutable)
1002 fm.write('pythonver', _("checking Python version (%s)\n"),
1003 fm.write('pythonver', _("checking Python version (%s)\n"),
1003 ("%d.%d.%d" % sys.version_info[:3]))
1004 ("%d.%d.%d" % sys.version_info[:3]))
1004 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1005 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1005 os.path.dirname(pycompat.fsencode(os.__file__)))
1006 os.path.dirname(pycompat.fsencode(os.__file__)))
1006
1007
1007 security = set(sslutil.supportedprotocols)
1008 security = set(sslutil.supportedprotocols)
1008 if sslutil.hassni:
1009 if sslutil.hassni:
1009 security.add('sni')
1010 security.add('sni')
1010
1011
1011 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1012 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1012 fm.formatlist(sorted(security), name='protocol',
1013 fm.formatlist(sorted(security), name='protocol',
1013 fmt='%s', sep=','))
1014 fmt='%s', sep=','))
1014
1015
1015 # These are warnings, not errors. So don't increment problem count. This
1016 # These are warnings, not errors. So don't increment problem count. This
1016 # may change in the future.
1017 # may change in the future.
1017 if 'tls1.2' not in security:
1018 if 'tls1.2' not in security:
1018 fm.plain(_(' TLS 1.2 not supported by Python install; '
1019 fm.plain(_(' TLS 1.2 not supported by Python install; '
1019 'network connections lack modern security\n'))
1020 'network connections lack modern security\n'))
1020 if 'sni' not in security:
1021 if 'sni' not in security:
1021 fm.plain(_(' SNI not supported by Python install; may have '
1022 fm.plain(_(' SNI not supported by Python install; may have '
1022 'connectivity issues with some servers\n'))
1023 'connectivity issues with some servers\n'))
1023
1024
1024 # TODO print CA cert info
1025 # TODO print CA cert info
1025
1026
1026 # hg version
1027 # hg version
1027 hgver = util.version()
1028 hgver = util.version()
1028 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1029 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1029 hgver.split('+')[0])
1030 hgver.split('+')[0])
1030 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1031 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1031 '+'.join(hgver.split('+')[1:]))
1032 '+'.join(hgver.split('+')[1:]))
1032
1033
1033 # compiled modules
1034 # compiled modules
1034 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1035 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1035 policy.policy)
1036 policy.policy)
1036 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1037 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1037 os.path.dirname(pycompat.fsencode(__file__)))
1038 os.path.dirname(pycompat.fsencode(__file__)))
1038
1039
1039 if policy.policy in ('c', 'allow'):
1040 if policy.policy in ('c', 'allow'):
1040 err = None
1041 err = None
1041 try:
1042 try:
1042 from .cext import (
1043 from .cext import (
1043 base85,
1044 base85,
1044 bdiff,
1045 bdiff,
1045 mpatch,
1046 mpatch,
1046 osutil,
1047 osutil,
1047 )
1048 )
1048 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1049 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1049 except Exception as inst:
1050 except Exception as inst:
1050 err = inst
1051 err = inst
1051 problems += 1
1052 problems += 1
1052 fm.condwrite(err, 'extensionserror', " %s\n", err)
1053 fm.condwrite(err, 'extensionserror', " %s\n", err)
1053
1054
1054 compengines = util.compengines._engines.values()
1055 compengines = util.compengines._engines.values()
1055 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1056 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1056 fm.formatlist(sorted(e.name() for e in compengines),
1057 fm.formatlist(sorted(e.name() for e in compengines),
1057 name='compengine', fmt='%s', sep=', '))
1058 name='compengine', fmt='%s', sep=', '))
1058 fm.write('compenginesavail', _('checking available compression engines '
1059 fm.write('compenginesavail', _('checking available compression engines '
1059 '(%s)\n'),
1060 '(%s)\n'),
1060 fm.formatlist(sorted(e.name() for e in compengines
1061 fm.formatlist(sorted(e.name() for e in compengines
1061 if e.available()),
1062 if e.available()),
1062 name='compengine', fmt='%s', sep=', '))
1063 name='compengine', fmt='%s', sep=', '))
1063 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1064 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1064 fm.write('compenginesserver', _('checking available compression engines '
1065 fm.write('compenginesserver', _('checking available compression engines '
1065 'for wire protocol (%s)\n'),
1066 'for wire protocol (%s)\n'),
1066 fm.formatlist([e.name() for e in wirecompengines
1067 fm.formatlist([e.name() for e in wirecompengines
1067 if e.wireprotosupport()],
1068 if e.wireprotosupport()],
1068 name='compengine', fmt='%s', sep=', '))
1069 name='compengine', fmt='%s', sep=', '))
1069
1070
1070 # templates
1071 # templates
1071 p = templater.templatepaths()
1072 p = templater.templatepaths()
1072 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1073 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1073 fm.condwrite(not p, '', _(" no template directories found\n"))
1074 fm.condwrite(not p, '', _(" no template directories found\n"))
1074 if p:
1075 if p:
1075 m = templater.templatepath("map-cmdline.default")
1076 m = templater.templatepath("map-cmdline.default")
1076 if m:
1077 if m:
1077 # template found, check if it is working
1078 # template found, check if it is working
1078 err = None
1079 err = None
1079 try:
1080 try:
1080 templater.templater.frommapfile(m)
1081 templater.templater.frommapfile(m)
1081 except Exception as inst:
1082 except Exception as inst:
1082 err = inst
1083 err = inst
1083 p = None
1084 p = None
1084 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1085 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1085 else:
1086 else:
1086 p = None
1087 p = None
1087 fm.condwrite(p, 'defaulttemplate',
1088 fm.condwrite(p, 'defaulttemplate',
1088 _("checking default template (%s)\n"), m)
1089 _("checking default template (%s)\n"), m)
1089 fm.condwrite(not m, 'defaulttemplatenotfound',
1090 fm.condwrite(not m, 'defaulttemplatenotfound',
1090 _(" template '%s' not found\n"), "default")
1091 _(" template '%s' not found\n"), "default")
1091 if not p:
1092 if not p:
1092 problems += 1
1093 problems += 1
1093 fm.condwrite(not p, '',
1094 fm.condwrite(not p, '',
1094 _(" (templates seem to have been installed incorrectly)\n"))
1095 _(" (templates seem to have been installed incorrectly)\n"))
1095
1096
1096 # editor
1097 # editor
1097 editor = ui.geteditor()
1098 editor = ui.geteditor()
1098 editor = util.expandpath(editor)
1099 editor = util.expandpath(editor)
1099 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1100 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1100 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1101 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1101 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1102 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1102 _(" No commit editor set and can't find %s in PATH\n"
1103 _(" No commit editor set and can't find %s in PATH\n"
1103 " (specify a commit editor in your configuration"
1104 " (specify a commit editor in your configuration"
1104 " file)\n"), not cmdpath and editor == 'vi' and editor)
1105 " file)\n"), not cmdpath and editor == 'vi' and editor)
1105 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1106 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1106 _(" Can't find editor '%s' in PATH\n"
1107 _(" Can't find editor '%s' in PATH\n"
1107 " (specify a commit editor in your configuration"
1108 " (specify a commit editor in your configuration"
1108 " file)\n"), not cmdpath and editor)
1109 " file)\n"), not cmdpath and editor)
1109 if not cmdpath and editor != 'vi':
1110 if not cmdpath and editor != 'vi':
1110 problems += 1
1111 problems += 1
1111
1112
1112 # check username
1113 # check username
1113 username = None
1114 username = None
1114 err = None
1115 err = None
1115 try:
1116 try:
1116 username = ui.username()
1117 username = ui.username()
1117 except error.Abort as e:
1118 except error.Abort as e:
1118 err = e
1119 err = e
1119 problems += 1
1120 problems += 1
1120
1121
1121 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1122 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1122 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1123 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1123 " (specify a username in your configuration file)\n"), err)
1124 " (specify a username in your configuration file)\n"), err)
1124
1125
1125 fm.condwrite(not problems, '',
1126 fm.condwrite(not problems, '',
1126 _("no problems detected\n"))
1127 _("no problems detected\n"))
1127 if not problems:
1128 if not problems:
1128 fm.data(problems=problems)
1129 fm.data(problems=problems)
1129 fm.condwrite(problems, 'problems',
1130 fm.condwrite(problems, 'problems',
1130 _("%d problems detected,"
1131 _("%d problems detected,"
1131 " please check your install!\n"), problems)
1132 " please check your install!\n"), problems)
1132 fm.end()
1133 fm.end()
1133
1134
1134 return problems
1135 return problems
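# Example: since the checks are emitted through a formatter
# (cmdutil.formatteropts above), the report can also be produced as
# machine-readable output, e.g.:
#   $ hg debuginstall -Tjson
# which carries the same keys written via fm.write()/fm.data() above
# ('encoding', 'pythonver', 'hgver', 'problems', ...).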
1135
1136
1136 @command('debugknown', [], _('REPO ID...'), norepo=True)
1137 @command('debugknown', [], _('REPO ID...'), norepo=True)
1137 def debugknown(ui, repopath, *ids, **opts):
1138 def debugknown(ui, repopath, *ids, **opts):
1138 """test whether node ids are known to a repo
1139 """test whether node ids are known to a repo
1139
1140
1140 Every ID must be a full-length hex node id string. Returns a list of 0s
1141 Every ID must be a full-length hex node id string. Returns a list of 0s
1141 and 1s indicating unknown/known.
1142 and 1s indicating unknown/known.
1142 """
1143 """
1143 opts = pycompat.byteskwargs(opts)
1144 opts = pycompat.byteskwargs(opts)
1144 repo = hg.peer(ui, opts, repopath)
1145 repo = hg.peer(ui, opts, repopath)
1145 if not repo.capable('known'):
1146 if not repo.capable('known'):
1146 raise error.Abort("known() not supported by target repository")
1147 raise error.Abort("known() not supported by target repository")
1147 flags = repo.known([bin(s) for s in ids])
1148 flags = repo.known([bin(s) for s in ids])
1148 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1149 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1149
1150
1150 @command('debuglabelcomplete', [], _('LABEL...'))
1151 @command('debuglabelcomplete', [], _('LABEL...'))
1151 def debuglabelcomplete(ui, repo, *args):
1152 def debuglabelcomplete(ui, repo, *args):
1152 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1153 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1153 debugnamecomplete(ui, repo, *args)
1154 debugnamecomplete(ui, repo, *args)
1154
1155
1155 @command('debuglocks',
1156 @command('debuglocks',
1156 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1157 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1157 ('W', 'force-wlock', None,
1158 ('W', 'force-wlock', None,
1158 _('free the working state lock (DANGEROUS)'))],
1159 _('free the working state lock (DANGEROUS)'))],
1159 _('[OPTION]...'))
1160 _('[OPTION]...'))
1160 def debuglocks(ui, repo, **opts):
1161 def debuglocks(ui, repo, **opts):
1161 """show or modify state of locks
1162 """show or modify state of locks
1162
1163
1163 By default, this command will show which locks are held. This
1164 By default, this command will show which locks are held. This
1164 includes the user and process holding the lock, the amount of time
1165 includes the user and process holding the lock, the amount of time
1165 the lock has been held, and the machine name where the process is
1166 the lock has been held, and the machine name where the process is
1166 running if it's not local.
1167 running if it's not local.
1167
1168
1168 Locks protect the integrity of Mercurial's data, so they should be
1169 Locks protect the integrity of Mercurial's data, so they should be
1169 treated with care. System crashes or other interruptions may cause
1170 treated with care. System crashes or other interruptions may cause
1170 locks to not be properly released, though Mercurial will usually
1171 locks to not be properly released, though Mercurial will usually
1171 detect and remove such stale locks automatically.
1172 detect and remove such stale locks automatically.
1172
1173
1173 However, detecting stale locks may not always be possible (for
1174 However, detecting stale locks may not always be possible (for
1174 instance, on a shared filesystem). Removing locks may also be
1175 instance, on a shared filesystem). Removing locks may also be
1175 blocked by filesystem permissions.
1176 blocked by filesystem permissions.
1176
1177
1177 Returns 0 if no locks are held.
1178 Returns 0 if no locks are held.
1178
1179
1179 """
1180 """
1180
1181
1181 if opts.get(r'force_lock'):
1182 if opts.get(r'force_lock'):
1182 repo.svfs.unlink('lock')
1183 repo.svfs.unlink('lock')
1183 if opts.get(r'force_wlock'):
1184 if opts.get(r'force_wlock'):
1184 repo.vfs.unlink('wlock')
1185 repo.vfs.unlink('wlock')
1185 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1186 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1186 return 0
1187 return 0
1187
1188
1188 now = time.time()
1189 now = time.time()
1189 held = 0
1190 held = 0
1190
1191
1191 def report(vfs, name, method):
1192 def report(vfs, name, method):
1192 # this causes stale locks to get reaped for more accurate reporting
1193 # this causes stale locks to get reaped for more accurate reporting
1193 try:
1194 try:
1194 l = method(False)
1195 l = method(False)
1195 except error.LockHeld:
1196 except error.LockHeld:
1196 l = None
1197 l = None
1197
1198
1198 if l:
1199 if l:
1199 l.release()
1200 l.release()
1200 else:
1201 else:
1201 try:
1202 try:
1202 stat = vfs.lstat(name)
1203 stat = vfs.lstat(name)
1203 age = now - stat.st_mtime
1204 age = now - stat.st_mtime
1204 user = util.username(stat.st_uid)
1205 user = util.username(stat.st_uid)
1205 locker = vfs.readlock(name)
1206 locker = vfs.readlock(name)
1206 if ":" in locker:
1207 if ":" in locker:
1207 host, pid = locker.split(':')
1208 host, pid = locker.split(':')
1208 if host == socket.gethostname():
1209 if host == socket.gethostname():
1209 locker = 'user %s, process %s' % (user, pid)
1210 locker = 'user %s, process %s' % (user, pid)
1210 else:
1211 else:
1211 locker = 'user %s, process %s, host %s' \
1212 locker = 'user %s, process %s, host %s' \
1212 % (user, pid, host)
1213 % (user, pid, host)
1213 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1214 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1214 return 1
1215 return 1
1215 except OSError as e:
1216 except OSError as e:
1216 if e.errno != errno.ENOENT:
1217 if e.errno != errno.ENOENT:
1217 raise
1218 raise
1218
1219
1219 ui.write(("%-6s free\n") % (name + ":"))
1220 ui.write(("%-6s free\n") % (name + ":"))
1220 return 0
1221 return 0
1221
1222
1222 held += report(repo.svfs, "lock", repo.lock)
1223 held += report(repo.svfs, "lock", repo.lock)
1223 held += report(repo.vfs, "wlock", repo.wlock)
1224 held += report(repo.vfs, "wlock", repo.wlock)
1224
1225
1225 return held
1226 return held
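# Example output (user, pid, and age are hypothetical; the format strings are
# the ui.write() calls in report() above). The return value is the number of
# locks still held, so 0 means both are free:
#   $ hg debuglocks
#   lock:  free
#   wlock: user alice, process 12345 (3s)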
1226
1227
1227 @command('debugmergestate', [], '')
1228 @command('debugmergestate', [], '')
1228 def debugmergestate(ui, repo, *args):
1229 def debugmergestate(ui, repo, *args):
1229 """print merge state
1230 """print merge state
1230
1231
1231 Use --verbose to print out information about whether v1 or v2 merge state
1232 Use --verbose to print out information about whether v1 or v2 merge state
1232 was chosen."""
1233 was chosen."""
1233 def _hashornull(h):
1234 def _hashornull(h):
1234 if h == nullhex:
1235 if h == nullhex:
1235 return 'null'
1236 return 'null'
1236 else:
1237 else:
1237 return h
1238 return h
1238
1239
1239 def printrecords(version):
1240 def printrecords(version):
1240 ui.write(('* version %s records\n') % version)
1241 ui.write(('* version %s records\n') % version)
1241 if version == 1:
1242 if version == 1:
1242 records = v1records
1243 records = v1records
1243 else:
1244 else:
1244 records = v2records
1245 records = v2records
1245
1246
1246 for rtype, record in records:
1247 for rtype, record in records:
1247 # pretty print some record types
1248 # pretty print some record types
1248 if rtype == 'L':
1249 if rtype == 'L':
1249 ui.write(('local: %s\n') % record)
1250 ui.write(('local: %s\n') % record)
1250 elif rtype == 'O':
1251 elif rtype == 'O':
1251 ui.write(('other: %s\n') % record)
1252 ui.write(('other: %s\n') % record)
1252 elif rtype == 'm':
1253 elif rtype == 'm':
1253 driver, mdstate = record.split('\0', 1)
1254 driver, mdstate = record.split('\0', 1)
1254 ui.write(('merge driver: %s (state "%s")\n')
1255 ui.write(('merge driver: %s (state "%s")\n')
1255 % (driver, mdstate))
1256 % (driver, mdstate))
1256 elif rtype in 'FDC':
1257 elif rtype in 'FDC':
1257 r = record.split('\0')
1258 r = record.split('\0')
1258 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1259 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1259 if version == 1:
1260 if version == 1:
1260 onode = 'not stored in v1 format'
1261 onode = 'not stored in v1 format'
1261 flags = r[7]
1262 flags = r[7]
1262 else:
1263 else:
1263 onode, flags = r[7:9]
1264 onode, flags = r[7:9]
1264 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1265 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1265 % (f, rtype, state, _hashornull(hash)))
1266 % (f, rtype, state, _hashornull(hash)))
1266 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1267 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1267 ui.write((' ancestor path: %s (node %s)\n')
1268 ui.write((' ancestor path: %s (node %s)\n')
1268 % (afile, _hashornull(anode)))
1269 % (afile, _hashornull(anode)))
1269 ui.write((' other path: %s (node %s)\n')
1270 ui.write((' other path: %s (node %s)\n')
1270 % (ofile, _hashornull(onode)))
1271 % (ofile, _hashornull(onode)))
1271 elif rtype == 'f':
1272 elif rtype == 'f':
1272 filename, rawextras = record.split('\0', 1)
1273 filename, rawextras = record.split('\0', 1)
1273 extras = rawextras.split('\0')
1274 extras = rawextras.split('\0')
1274 i = 0
1275 i = 0
1275 extrastrings = []
1276 extrastrings = []
1276 while i < len(extras):
1277 while i < len(extras):
1277 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1278 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1278 i += 2
1279 i += 2
1279
1280
1280 ui.write(('file extras: %s (%s)\n')
1281 ui.write(('file extras: %s (%s)\n')
1281 % (filename, ', '.join(extrastrings)))
1282 % (filename, ', '.join(extrastrings)))
1282 elif rtype == 'l':
1283 elif rtype == 'l':
1283 labels = record.split('\0', 2)
1284 labels = record.split('\0', 2)
1284 labels = [l for l in labels if len(l) > 0]
1285 labels = [l for l in labels if len(l) > 0]
1285 ui.write(('labels:\n'))
1286 ui.write(('labels:\n'))
1286 ui.write((' local: %s\n' % labels[0]))
1287 ui.write((' local: %s\n' % labels[0]))
1287 ui.write((' other: %s\n' % labels[1]))
1288 ui.write((' other: %s\n' % labels[1]))
1288 if len(labels) > 2:
1289 if len(labels) > 2:
1289 ui.write((' base: %s\n' % labels[2]))
1290 ui.write((' base: %s\n' % labels[2]))
1290 else:
1291 else:
1291 ui.write(('unrecognized entry: %s\t%s\n')
1292 ui.write(('unrecognized entry: %s\t%s\n')
1292 % (rtype, record.replace('\0', '\t')))
1293 % (rtype, record.replace('\0', '\t')))
1293
1294
1294 # Avoid mergestate.read() since it may raise an exception for unsupported
1295 # Avoid mergestate.read() since it may raise an exception for unsupported
1295 # merge state records. We shouldn't be doing this, but this is OK since this
1296 # merge state records. We shouldn't be doing this, but this is OK since this
1296 # command is pretty low-level.
1297 # command is pretty low-level.
1297 ms = mergemod.mergestate(repo)
1298 ms = mergemod.mergestate(repo)
1298
1299
1299 # sort so that reasonable information is on top
1300 # sort so that reasonable information is on top
1300 v1records = ms._readrecordsv1()
1301 v1records = ms._readrecordsv1()
1301 v2records = ms._readrecordsv2()
1302 v2records = ms._readrecordsv2()
1302 order = 'LOml'
1303 order = 'LOml'
1303 def key(r):
1304 def key(r):
1304 idx = order.find(r[0])
1305 idx = order.find(r[0])
1305 if idx == -1:
1306 if idx == -1:
1306 return (1, r[1])
1307 return (1, r[1])
1307 else:
1308 else:
1308 return (0, idx)
1309 return (0, idx)
1309 v1records.sort(key=key)
1310 v1records.sort(key=key)
1310 v2records.sort(key=key)
1311 v2records.sort(key=key)
1311
1312
1312 if not v1records and not v2records:
1313 if not v1records and not v2records:
1313 ui.write(('no merge state found\n'))
1314 ui.write(('no merge state found\n'))
1314 elif not v2records:
1315 elif not v2records:
1315 ui.note(('no version 2 merge state\n'))
1316 ui.note(('no version 2 merge state\n'))
1316 printrecords(1)
1317 printrecords(1)
1317 elif ms._v1v2match(v1records, v2records):
1318 elif ms._v1v2match(v1records, v2records):
1318 ui.note(('v1 and v2 states match: using v2\n'))
1319 ui.note(('v1 and v2 states match: using v2\n'))
1319 printrecords(2)
1320 printrecords(2)
1320 else:
1321 else:
1321 ui.note(('v1 and v2 states mismatch: using v1\n'))
1322 ui.note(('v1 and v2 states mismatch: using v1\n'))
1322 printrecords(1)
1323 printrecords(1)
1323 if ui.verbose:
1324 if ui.verbose:
1324 printrecords(2)
1325 printrecords(2)
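# A standalone sketch of the record ordering used above: record types listed
# in 'LOml' sort first, in that order; anything else sorts after them by its
# payload.
def _recordkey(record, order='LOml'):
    idx = order.find(record[0])
    return (1, record[1]) if idx == -1 else (0, idx)
# sorted([('f', 'x'), ('m', 'd'), ('O', 'o'), ('L', 'l')], key=_recordkey)
# -> [('L', 'l'), ('O', 'o'), ('m', 'd'), ('f', 'x')]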
1325
1326
1326 @command('debugnamecomplete', [], _('NAME...'))
1327 @command('debugnamecomplete', [], _('NAME...'))
1327 def debugnamecomplete(ui, repo, *args):
1328 def debugnamecomplete(ui, repo, *args):
1328 '''complete "names" - tags, open branch names, bookmark names'''
1329 '''complete "names" - tags, open branch names, bookmark names'''
1329
1330
1330 names = set()
1331 names = set()
1331 # since we previously only listed open branches, we will handle that
1332 # since we previously only listed open branches, we will handle that
1332 # specially (after this for loop)
1333 # specially (after this for loop)
1333 for name, ns in repo.names.iteritems():
1334 for name, ns in repo.names.iteritems():
1334 if name != 'branches':
1335 if name != 'branches':
1335 names.update(ns.listnames(repo))
1336 names.update(ns.listnames(repo))
1336 names.update(tag for (tag, heads, tip, closed)
1337 names.update(tag for (tag, heads, tip, closed)
1337 in repo.branchmap().iterbranches() if not closed)
1338 in repo.branchmap().iterbranches() if not closed)
1338 completions = set()
1339 completions = set()
1339 if not args:
1340 if not args:
1340 args = ['']
1341 args = ['']
1341 for a in args:
1342 for a in args:
1342 completions.update(n for n in names if n.startswith(a))
1343 completions.update(n for n in names if n.startswith(a))
1343 ui.write('\n'.join(sorted(completions)))
1344 ui.write('\n'.join(sorted(completions)))
1344 ui.write('\n')
1345 ui.write('\n')
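# Example (names are hypothetical): the completions are the union of tags,
# bookmarks, and open branch names that start with each argument, one per
# line:
#   $ hg debugnamecomplete re
#   release-1.0
#   review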
1345
1346
1346 @command('debugobsolete',
1347 @command('debugobsolete',
1347 [('', 'flags', 0, _('markers flag')),
1348 [('', 'flags', 0, _('markers flag')),
1348 ('', 'record-parents', False,
1349 ('', 'record-parents', False,
1349 _('record parent information for the precursor')),
1350 _('record parent information for the precursor')),
1350 ('r', 'rev', [], _('display markers relevant to REV')),
1351 ('r', 'rev', [], _('display markers relevant to REV')),
1351 ('', 'exclusive', False, _('restrict display to markers only '
1352 ('', 'exclusive', False, _('restrict display to markers only '
1352 'relevant to REV')),
1353 'relevant to REV')),
1353 ('', 'index', False, _('display index of the marker')),
1354 ('', 'index', False, _('display index of the marker')),
1354 ('', 'delete', [], _('delete markers specified by indices')),
1355 ('', 'delete', [], _('delete markers specified by indices')),
1355 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1356 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1356 _('[OBSOLETED [REPLACEMENT ...]]'))
1357 _('[OBSOLETED [REPLACEMENT ...]]'))
1357 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1358 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1358 """create arbitrary obsolete marker
1359 """create arbitrary obsolete marker
1359
1360
1360 With no arguments, displays the list of obsolescence markers."""
1361 With no arguments, displays the list of obsolescence markers."""
1361
1362
1362 opts = pycompat.byteskwargs(opts)
1363 opts = pycompat.byteskwargs(opts)
1363
1364
1364 def parsenodeid(s):
1365 def parsenodeid(s):
1365 try:
1366 try:
1366 # We do not use revsingle/revrange functions here to accept
1367 # We do not use revsingle/revrange functions here to accept
1367 # arbitrary node identifiers, possibly not present in the
1368 # arbitrary node identifiers, possibly not present in the
1368 # local repository.
1369 # local repository.
1369 n = bin(s)
1370 n = bin(s)
1370 if len(n) != len(nullid):
1371 if len(n) != len(nullid):
1371 raise TypeError()
1372 raise TypeError()
1372 return n
1373 return n
1373 except TypeError:
1374 except TypeError:
1374 raise error.Abort('changeset references must be full hexadecimal '
1375 raise error.Abort('changeset references must be full hexadecimal '
1375 'node identifiers')
1376 'node identifiers')
1376
1377
1377 if opts.get('delete'):
1378 if opts.get('delete'):
1378 indices = []
1379 indices = []
1379 for v in opts.get('delete'):
1380 for v in opts.get('delete'):
1380 try:
1381 try:
1381 indices.append(int(v))
1382 indices.append(int(v))
1382 except ValueError:
1383 except ValueError:
1383 raise error.Abort(_('invalid index value: %r') % v,
1384 raise error.Abort(_('invalid index value: %r') % v,
1384 hint=_('use integers for indices'))
1385 hint=_('use integers for indices'))
1385
1386
1386 if repo.currenttransaction():
1387 if repo.currenttransaction():
1387 raise error.Abort(_('cannot delete obsmarkers in the middle '
1388 raise error.Abort(_('cannot delete obsmarkers in the middle '
1388 'of transaction.'))
1389 'of transaction.'))
1389
1390
1390 with repo.lock():
1391 with repo.lock():
1391 n = repair.deleteobsmarkers(repo.obsstore, indices)
1392 n = repair.deleteobsmarkers(repo.obsstore, indices)
1392 ui.write(_('deleted %i obsolescence markers\n') % n)
1393 ui.write(_('deleted %i obsolescence markers\n') % n)
1393
1394
1394 return
1395 return
1395
1396
1396 if precursor is not None:
1397 if precursor is not None:
1397 if opts['rev']:
1398 if opts['rev']:
1398 raise error.Abort('cannot select revision when creating marker')
1399 raise error.Abort('cannot select revision when creating marker')
1399 metadata = {}
1400 metadata = {}
1400 metadata['user'] = opts['user'] or ui.username()
1401 metadata['user'] = opts['user'] or ui.username()
1401 succs = tuple(parsenodeid(succ) for succ in successors)
1402 succs = tuple(parsenodeid(succ) for succ in successors)
1402 l = repo.lock()
1403 l = repo.lock()
1403 try:
1404 try:
1404 tr = repo.transaction('debugobsolete')
1405 tr = repo.transaction('debugobsolete')
1405 try:
1406 try:
1406 date = opts.get('date')
1407 date = opts.get('date')
1407 if date:
1408 if date:
1408 date = util.parsedate(date)
1409 date = util.parsedate(date)
1409 else:
1410 else:
1410 date = None
1411 date = None
1411 prec = parsenodeid(precursor)
1412 prec = parsenodeid(precursor)
1412 parents = None
1413 parents = None
1413 if opts['record_parents']:
1414 if opts['record_parents']:
1414 if prec not in repo.unfiltered():
1415 if prec not in repo.unfiltered():
1415 raise error.Abort('cannot use --record-parents on '
1416 raise error.Abort('cannot use --record-parents on '
1416 'unknown changesets')
1417 'unknown changesets')
1417 parents = repo.unfiltered()[prec].parents()
1418 parents = repo.unfiltered()[prec].parents()
1418 parents = tuple(p.node() for p in parents)
1419 parents = tuple(p.node() for p in parents)
1419 repo.obsstore.create(tr, prec, succs, opts['flags'],
1420 repo.obsstore.create(tr, prec, succs, opts['flags'],
1420 parents=parents, date=date,
1421 parents=parents, date=date,
1421 metadata=metadata, ui=ui)
1422 metadata=metadata, ui=ui)
1422 tr.close()
1423 tr.close()
1423 except ValueError as exc:
1424 except ValueError as exc:
1424 raise error.Abort(_('bad obsmarker input: %s') % exc)
1425 raise error.Abort(_('bad obsmarker input: %s') % exc)
1425 finally:
1426 finally:
1426 tr.release()
1427 tr.release()
1427 finally:
1428 finally:
1428 l.release()
1429 l.release()
1429 else:
1430 else:
1430 if opts['rev']:
1431 if opts['rev']:
1431 revs = scmutil.revrange(repo, opts['rev'])
1432 revs = scmutil.revrange(repo, opts['rev'])
1432 nodes = [repo[r].node() for r in revs]
1433 nodes = [repo[r].node() for r in revs]
1433 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1434 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1434 exclusive=opts['exclusive']))
1435 exclusive=opts['exclusive']))
1435 markers.sort(key=lambda x: x._data)
1436 markers.sort(key=lambda x: x._data)
1436 else:
1437 else:
1437 markers = obsutil.getmarkers(repo)
1438 markers = obsutil.getmarkers(repo)
1438
1439
1439 markerstoiter = markers
1440 markerstoiter = markers
1440 isrelevant = lambda m: True
1441 isrelevant = lambda m: True
1441 if opts.get('rev') and opts.get('index'):
1442 if opts.get('rev') and opts.get('index'):
1442 markerstoiter = obsutil.getmarkers(repo)
1443 markerstoiter = obsutil.getmarkers(repo)
1443 markerset = set(markers)
1444 markerset = set(markers)
1444 isrelevant = lambda m: m in markerset
1445 isrelevant = lambda m: m in markerset
1445
1446
1446 fm = ui.formatter('debugobsolete', opts)
1447 fm = ui.formatter('debugobsolete', opts)
1447 for i, m in enumerate(markerstoiter):
1448 for i, m in enumerate(markerstoiter):
1448 if not isrelevant(m):
1449 if not isrelevant(m):
1449 # marker can be irrelevant when we're iterating over a set
1450 # marker can be irrelevant when we're iterating over a set
1450 # of markers (markerstoiter) which is bigger than the set
1451 # of markers (markerstoiter) which is bigger than the set
1451 # of markers we want to display (markers)
1452 # of markers we want to display (markers)
1452 # this can happen if both --index and --rev options are
1453 # this can happen if both --index and --rev options are
1453 # provided and thus we need to iterate over all of the markers
1454 # provided and thus we need to iterate over all of the markers
1454 # to get the correct indices, but only display the ones that
1455 # to get the correct indices, but only display the ones that
1455 # are relevant to --rev value
1456 # are relevant to --rev value
1456 continue
1457 continue
1457 fm.startitem()
1458 fm.startitem()
1458 ind = i if opts.get('index') else None
1459 ind = i if opts.get('index') else None
1459 cmdutil.showmarker(fm, m, index=ind)
1460 cmdutil.showmarker(fm, m, index=ind)
1460 fm.end()
1461 fm.end()
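# Example invocations (node ids are hypothetical full 40-hex strings):
#   $ hg debugobsolete <old-node> <new-node>    # record one marker
#   $ hg debugobsolete --rev . --index          # list markers relevant to '.'
#   $ hg debugobsolete --delete 0 2             # drop markers by index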
1461
1462
1462 @command('debugpathcomplete',
1463 @command('debugpathcomplete',
1463 [('f', 'full', None, _('complete an entire path')),
1464 [('f', 'full', None, _('complete an entire path')),
1464 ('n', 'normal', None, _('show only normal files')),
1465 ('n', 'normal', None, _('show only normal files')),
1465 ('a', 'added', None, _('show only added files')),
1466 ('a', 'added', None, _('show only added files')),
1466 ('r', 'removed', None, _('show only removed files'))],
1467 ('r', 'removed', None, _('show only removed files'))],
1467 _('FILESPEC...'))
1468 _('FILESPEC...'))
1468 def debugpathcomplete(ui, repo, *specs, **opts):
1469 def debugpathcomplete(ui, repo, *specs, **opts):
1469 '''complete part or all of a tracked path
1470 '''complete part or all of a tracked path
1470
1471
1471 This command supports shells that offer path name completion. It
1472 This command supports shells that offer path name completion. It
1472 currently completes only files already known to the dirstate.
1473 currently completes only files already known to the dirstate.
1473
1474
1474 Completion extends only to the next path segment unless
1475 Completion extends only to the next path segment unless
1475 --full is specified, in which case entire paths are used.'''
1476 --full is specified, in which case entire paths are used.'''
1476
1477
1477 def complete(path, acceptable):
1478 def complete(path, acceptable):
1478 dirstate = repo.dirstate
1479 dirstate = repo.dirstate
1479 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1480 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1480 rootdir = repo.root + pycompat.ossep
1481 rootdir = repo.root + pycompat.ossep
1481 if spec != repo.root and not spec.startswith(rootdir):
1482 if spec != repo.root and not spec.startswith(rootdir):
1482 return [], []
1483 return [], []
1483 if os.path.isdir(spec):
1484 if os.path.isdir(spec):
1484 spec += '/'
1485 spec += '/'
1485 spec = spec[len(rootdir):]
1486 spec = spec[len(rootdir):]
1486 fixpaths = pycompat.ossep != '/'
1487 fixpaths = pycompat.ossep != '/'
1487 if fixpaths:
1488 if fixpaths:
1488 spec = spec.replace(pycompat.ossep, '/')
1489 spec = spec.replace(pycompat.ossep, '/')
1489 speclen = len(spec)
1490 speclen = len(spec)
1490 fullpaths = opts[r'full']
1491 fullpaths = opts[r'full']
1491 files, dirs = set(), set()
1492 files, dirs = set(), set()
1492 adddir, addfile = dirs.add, files.add
1493 adddir, addfile = dirs.add, files.add
1493 for f, st in dirstate.iteritems():
1494 for f, st in dirstate.iteritems():
1494 if f.startswith(spec) and st[0] in acceptable:
1495 if f.startswith(spec) and st[0] in acceptable:
1495 if fixpaths:
1496 if fixpaths:
1496 f = f.replace('/', pycompat.ossep)
1497 f = f.replace('/', pycompat.ossep)
1497 if fullpaths:
1498 if fullpaths:
1498 addfile(f)
1499 addfile(f)
1499 continue
1500 continue
1500 s = f.find(pycompat.ossep, speclen)
1501 s = f.find(pycompat.ossep, speclen)
1501 if s >= 0:
1502 if s >= 0:
1502 adddir(f[:s])
1503 adddir(f[:s])
1503 else:
1504 else:
1504 addfile(f)
1505 addfile(f)
1505 return files, dirs
1506 return files, dirs
1506
1507
1507 acceptable = ''
1508 acceptable = ''
1508 if opts[r'normal']:
1509 if opts[r'normal']:
1509 acceptable += 'nm'
1510 acceptable += 'nm'
1510 if opts[r'added']:
1511 if opts[r'added']:
1511 acceptable += 'a'
1512 acceptable += 'a'
1512 if opts[r'removed']:
1513 if opts[r'removed']:
1513 acceptable += 'r'
1514 acceptable += 'r'
1514 cwd = repo.getcwd()
1515 cwd = repo.getcwd()
1515 if not specs:
1516 if not specs:
1516 specs = ['.']
1517 specs = ['.']
1517
1518
1518 files, dirs = set(), set()
1519 files, dirs = set(), set()
1519 for spec in specs:
1520 for spec in specs:
1520 f, d = complete(spec, acceptable or 'nmar')
1521 f, d = complete(spec, acceptable or 'nmar')
1521 files.update(f)
1522 files.update(f)
1522 dirs.update(d)
1523 dirs.update(d)
1523 files.update(dirs)
1524 files.update(dirs)
1524 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1525 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1525 ui.write('\n')
1526 ui.write('\n')
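# Example (paths are hypothetical): with --full, entire tracked paths under
# the given prefix are printed; without it, completion stops at the next path
# segment:
#   $ hg debugpathcomplete -f src/
#   src/main.py
#   src/util/helpers.py
#   $ hg debugpathcomplete src/
#   src/main.py
#   src/util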
1526
1527
1527 @command('debugpickmergetool',
1528 @command('debugpickmergetool',
1528 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1529 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1529 ('', 'changedelete', None, _('emulate merging change and delete')),
1530 ('', 'changedelete', None, _('emulate merging change and delete')),
1530 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1531 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1531 _('[PATTERN]...'),
1532 _('[PATTERN]...'),
1532 inferrepo=True)
1533 inferrepo=True)
1533 def debugpickmergetool(ui, repo, *pats, **opts):
1534 def debugpickmergetool(ui, repo, *pats, **opts):
1534 """examine which merge tool is chosen for specified file
1535 """examine which merge tool is chosen for specified file
1535
1536
1536 As described in :hg:`help merge-tools`, Mercurial examines
1537 As described in :hg:`help merge-tools`, Mercurial examines
1537 configurations below in this order to decide which merge tool is
1538 configurations below in this order to decide which merge tool is
1538 chosen for specified file.
1539 chosen for specified file.
1539
1540
1540 1. ``--tool`` option
1541 1. ``--tool`` option
1541 2. ``HGMERGE`` environment variable
1542 2. ``HGMERGE`` environment variable
1542 3. configurations in ``merge-patterns`` section
1543 3. configurations in ``merge-patterns`` section
1543 4. configuration of ``ui.merge``
1544 4. configuration of ``ui.merge``
1544 5. configurations in ``merge-tools`` section
1545 5. configurations in ``merge-tools`` section
1545 6. ``hgmerge`` tool (for historical reasons only)
1546 6. ``hgmerge`` tool (for historical reasons only)
1546 7. default tool for fallback (``:merge`` or ``:prompt``)
1547 7. default tool for fallback (``:merge`` or ``:prompt``)
1547
1548
1548 This command writes out the examination result in the style below::
1549 This command writes out the examination result in the style below::
1549
1550
1550 FILE = MERGETOOL
1551 FILE = MERGETOOL
1551
1552
1552 By default, all files known in the first parent context of the
1553 By default, all files known in the first parent context of the
1553 working directory are examined. Use file patterns and/or -I/-X
1554 working directory are examined. Use file patterns and/or -I/-X
1554 options to limit target files. -r/--rev is also useful to examine
1555 options to limit target files. -r/--rev is also useful to examine
1555 files in another context without actually updating to it.
1556 files in another context without actually updating to it.
1556
1557
1557 With --debug, this command also shows warning messages while matching
1558 With --debug, this command also shows warning messages while matching
1558 against ``merge-patterns`` and related configuration. It is recommended
1559 against ``merge-patterns`` and related configuration. It is recommended
1559 to use this option with explicit file patterns and/or -I/-X options,
1560 to use this option with explicit file patterns and/or -I/-X options,
1560 because it increases the amount of output per file according to the
1561 because it increases the amount of output per file according to the
1561 configurations in hgrc.
1562 configurations in hgrc.
1562
1563
1563 With -v/--verbose, this command shows configurations below at
1564 With -v/--verbose, this command shows configurations below at
1564 first (only if specified).
1565 first (only if specified).
1565
1566
1566 - ``--tool`` option
1567 - ``--tool`` option
1567 - ``HGMERGE`` environment variable
1568 - ``HGMERGE`` environment variable
1568 - configuration of ``ui.merge``
1569 - configuration of ``ui.merge``
1569
1570
1570 If a merge tool is chosen before matching against
1571 If a merge tool is chosen before matching against
1571 ``merge-patterns``, this command can't show any helpful
1572 ``merge-patterns``, this command can't show any helpful
1572 information, even with --debug. In that case, the information above
1573 information, even with --debug. In that case, the information above
1573 is useful for understanding why a merge tool was chosen.
1574 is useful for understanding why a merge tool was chosen.
1574 """
1575 """
1575 opts = pycompat.byteskwargs(opts)
1576 opts = pycompat.byteskwargs(opts)
1576 overrides = {}
1577 overrides = {}
1577 if opts['tool']:
1578 if opts['tool']:
1578 overrides[('ui', 'forcemerge')] = opts['tool']
1579 overrides[('ui', 'forcemerge')] = opts['tool']
1579 ui.note(('with --tool %r\n') % (opts['tool']))
1580 ui.note(('with --tool %r\n') % (opts['tool']))
1580
1581
1581 with ui.configoverride(overrides, 'debugmergepatterns'):
1582 with ui.configoverride(overrides, 'debugmergepatterns'):
1582 hgmerge = encoding.environ.get("HGMERGE")
1583 hgmerge = encoding.environ.get("HGMERGE")
1583 if hgmerge is not None:
1584 if hgmerge is not None:
1584 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1585 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1585 uimerge = ui.config("ui", "merge")
1586 uimerge = ui.config("ui", "merge")
1586 if uimerge:
1587 if uimerge:
1587 ui.note(('with ui.merge=%r\n') % (uimerge))
1588 ui.note(('with ui.merge=%r\n') % (uimerge))
1588
1589
1589 ctx = scmutil.revsingle(repo, opts.get('rev'))
1590 ctx = scmutil.revsingle(repo, opts.get('rev'))
1590 m = scmutil.match(ctx, pats, opts)
1591 m = scmutil.match(ctx, pats, opts)
1591 changedelete = opts['changedelete']
1592 changedelete = opts['changedelete']
1592 for path in ctx.walk(m):
1593 for path in ctx.walk(m):
1593 fctx = ctx[path]
1594 fctx = ctx[path]
1594 try:
1595 try:
1595 if not ui.debugflag:
1596 if not ui.debugflag:
1596 ui.pushbuffer(error=True)
1597 ui.pushbuffer(error=True)
1597 tool, toolpath = filemerge._picktool(repo, ui, path,
1598 tool, toolpath = filemerge._picktool(repo, ui, path,
1598 fctx.isbinary(),
1599 fctx.isbinary(),
1599 'l' in fctx.flags(),
1600 'l' in fctx.flags(),
1600 changedelete)
1601 changedelete)
1601 finally:
1602 finally:
1602 if not ui.debugflag:
1603 if not ui.debugflag:
1603 ui.popbuffer()
1604 ui.popbuffer()
1604 ui.write(('%s = %s\n') % (path, tool))
1605 ui.write(('%s = %s\n') % (path, tool))
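# Example (file name is hypothetical; the output follows the
# 'FILE = MERGETOOL' style documented above). Forcing a tool with --tool makes
# it win over every other configuration source:
#   $ hg debugpickmergetool --tool :merge3 src/main.c
#   src/main.c = :merge3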
1605
1606
1606 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1607 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1607 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1608 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1608 '''access the pushkey key/value protocol
1609 '''access the pushkey key/value protocol
1609
1610
1610 With two args, list the keys in the given namespace.
1611 With two args, list the keys in the given namespace.
1611
1612
1612 With five args, set a key to new if it currently is set to old.
1613 With five args, set a key to new if it currently is set to old.
1613 Reports success or failure.
1614 Reports success or failure.
1614 '''
1615 '''
1615
1616
1616 target = hg.peer(ui, {}, repopath)
1617 target = hg.peer(ui, {}, repopath)
1617 if keyinfo:
1618 if keyinfo:
1618 key, old, new = keyinfo
1619 key, old, new = keyinfo
1619 r = target.pushkey(namespace, key, old, new)
1620 r = target.pushkey(namespace, key, old, new)
1620 ui.status(str(r) + '\n')
1621 ui.status(str(r) + '\n')
1621 return not r
1622 return not r
1622 else:
1623 else:
1623 for k, v in sorted(target.listkeys(namespace).iteritems()):
1624 for k, v in sorted(target.listkeys(namespace).iteritems()):
1624 ui.write("%s\t%s\n" % (util.escapestr(k),
1625 ui.write("%s\t%s\n" % (util.escapestr(k),
1625 util.escapestr(v)))
1626 util.escapestr(v)))
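# Example (repository path and bookmark are hypothetical; each listing line is
# KEY<TAB>VALUE as written above):
#   $ hg debugpushkey /path/to/repo bookmarks
#   stable	0123456789abcdef...
# With five arguments (REPO NAMESPACE KEY OLD NEW) the command attempts the
# update and prints the value returned by the peer.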
1626
1627
1627 @command('debugpvec', [], _('A B'))
1628 @command('debugpvec', [], _('A B'))
1628 def debugpvec(ui, repo, a, b=None):
1629 def debugpvec(ui, repo, a, b=None):
1629 ca = scmutil.revsingle(repo, a)
1630 ca = scmutil.revsingle(repo, a)
1630 cb = scmutil.revsingle(repo, b)
1631 cb = scmutil.revsingle(repo, b)
1631 pa = pvec.ctxpvec(ca)
1632 pa = pvec.ctxpvec(ca)
1632 pb = pvec.ctxpvec(cb)
1633 pb = pvec.ctxpvec(cb)
1633 if pa == pb:
1634 if pa == pb:
1634 rel = "="
1635 rel = "="
1635 elif pa > pb:
1636 elif pa > pb:
1636 rel = ">"
1637 rel = ">"
1637 elif pa < pb:
1638 elif pa < pb:
1638 rel = "<"
1639 rel = "<"
1639 elif pa | pb:
1640 elif pa | pb:
1640 rel = "|"
1641 rel = "|"
1641 ui.write(_("a: %s\n") % pa)
1642 ui.write(_("a: %s\n") % pa)
1642 ui.write(_("b: %s\n") % pb)
1643 ui.write(_("b: %s\n") % pb)
1643 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1644 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1644 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1645 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1645 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1646 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1646 pa.distance(pb), rel))
1647 pa.distance(pb), rel))
1647
1648
1648 @command('debugrebuilddirstate|debugrebuildstate',
1649 @command('debugrebuilddirstate|debugrebuildstate',
1649 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1650 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1650 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1651 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1651 'the working copy parent')),
1652 'the working copy parent')),
1652 ],
1653 ],
1653 _('[-r REV]'))
1654 _('[-r REV]'))
1654 def debugrebuilddirstate(ui, repo, rev, **opts):
1655 def debugrebuilddirstate(ui, repo, rev, **opts):
1655 """rebuild the dirstate as it would look like for the given revision
1656 """rebuild the dirstate as it would look like for the given revision
1656
1657
1657 If no revision is specified, the first current parent will be used.
1658 If no revision is specified, the first current parent will be used.
1658
1659
1659 The dirstate will be set to the files of the given revision.
1660 The dirstate will be set to the files of the given revision.
1660 The actual working directory content or existing dirstate
1661 The actual working directory content or existing dirstate
1661 information such as adds or removes is not considered.
1662 information such as adds or removes is not considered.
1662
1663
1663 ``minimal`` will only rebuild the dirstate status for files that claim to be
1664 ``minimal`` will only rebuild the dirstate status for files that claim to be
1664 tracked but are not in the parent manifest, or that exist in the parent
1665 tracked but are not in the parent manifest, or that exist in the parent
1665 manifest but are not in the dirstate. It will not change adds, removes, or
1666 manifest but are not in the dirstate. It will not change adds, removes, or
1666 modified files that are in the working copy parent.
1667 modified files that are in the working copy parent.
1667
1668
1668 One use of this command is to make the next :hg:`status` invocation
1669 One use of this command is to make the next :hg:`status` invocation
1669 check the actual file content.
1670 check the actual file content.
1670 """
1671 """
1671 ctx = scmutil.revsingle(repo, rev)
1672 ctx = scmutil.revsingle(repo, rev)
1672 with repo.wlock():
1673 with repo.wlock():
1673 dirstate = repo.dirstate
1674 dirstate = repo.dirstate
1674 changedfiles = None
1675 changedfiles = None
1675 # See command doc for what minimal does.
1676 # See command doc for what minimal does.
1676 if opts.get(r'minimal'):
1677 if opts.get(r'minimal'):
1677 manifestfiles = set(ctx.manifest().keys())
1678 manifestfiles = set(ctx.manifest().keys())
1678 dirstatefiles = set(dirstate)
1679 dirstatefiles = set(dirstate)
1679 manifestonly = manifestfiles - dirstatefiles
1680 manifestonly = manifestfiles - dirstatefiles
1680 dsonly = dirstatefiles - manifestfiles
1681 dsonly = dirstatefiles - manifestfiles
1681 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1682 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1682 changedfiles = manifestonly | dsnotadded
1683 changedfiles = manifestonly | dsnotadded
1683
1684
1684 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1685 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
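# Example: reset dirstate entries to the working copy parent so that the next
# 'hg status' re-examines file content:
#   $ hg debugrebuilddirstate
# With --minimal, only entries that disagree with the parent manifest are
# touched, as described in the docstring above.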
1685
1686
1686 @command('debugrebuildfncache', [], '')
1687 @command('debugrebuildfncache', [], '')
1687 def debugrebuildfncache(ui, repo):
1688 def debugrebuildfncache(ui, repo):
1688 """rebuild the fncache file"""
1689 """rebuild the fncache file"""
1689 repair.rebuildfncache(ui, repo)
1690 repair.rebuildfncache(ui, repo)
1690
1691
1691 @command('debugrename',
1692 @command('debugrename',
1692 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1693 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1693 _('[-r REV] FILE'))
1694 _('[-r REV] FILE'))
1694 def debugrename(ui, repo, file1, *pats, **opts):
1695 def debugrename(ui, repo, file1, *pats, **opts):
1695 """dump rename information"""
1696 """dump rename information"""
1696
1697
1697 opts = pycompat.byteskwargs(opts)
1698 opts = pycompat.byteskwargs(opts)
1698 ctx = scmutil.revsingle(repo, opts.get('rev'))
1699 ctx = scmutil.revsingle(repo, opts.get('rev'))
1699 m = scmutil.match(ctx, (file1,) + pats, opts)
1700 m = scmutil.match(ctx, (file1,) + pats, opts)
1700 for abs in ctx.walk(m):
1701 for abs in ctx.walk(m):
1701 fctx = ctx[abs]
1702 fctx = ctx[abs]
1702 o = fctx.filelog().renamed(fctx.filenode())
1703 o = fctx.filelog().renamed(fctx.filenode())
1703 rel = m.rel(abs)
1704 rel = m.rel(abs)
1704 if o:
1705 if o:
1705 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1706 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1706 else:
1707 else:
1707 ui.write(_("%s not renamed\n") % rel)
1708 ui.write(_("%s not renamed\n") % rel)
1708
1709
1709 @command('debugrevlog', cmdutil.debugrevlogopts +
1710 @command('debugrevlog', cmdutil.debugrevlogopts +
1710 [('d', 'dump', False, _('dump index data'))],
1711 [('d', 'dump', False, _('dump index data'))],
1711 _('-c|-m|FILE'),
1712 _('-c|-m|FILE'),
1712 optionalrepo=True)
1713 optionalrepo=True)
1713 def debugrevlog(ui, repo, file_=None, **opts):
1714 def debugrevlog(ui, repo, file_=None, **opts):
1714 """show data and statistics about a revlog"""
1715 """show data and statistics about a revlog"""
1715 opts = pycompat.byteskwargs(opts)
1716 opts = pycompat.byteskwargs(opts)
1716 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1717 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1717
1718
1718 if opts.get("dump"):
1719 if opts.get("dump"):
1719 numrevs = len(r)
1720 numrevs = len(r)
1720 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1721 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1721 " rawsize totalsize compression heads chainlen\n"))
1722 " rawsize totalsize compression heads chainlen\n"))
1722 ts = 0
1723 ts = 0
1723 heads = set()
1724 heads = set()
1724
1725
1725 for rev in xrange(numrevs):
1726 for rev in xrange(numrevs):
1726 dbase = r.deltaparent(rev)
1727 dbase = r.deltaparent(rev)
1727 if dbase == -1:
1728 if dbase == -1:
1728 dbase = rev
1729 dbase = rev
1729 cbase = r.chainbase(rev)
1730 cbase = r.chainbase(rev)
1730 clen = r.chainlen(rev)
1731 clen = r.chainlen(rev)
1731 p1, p2 = r.parentrevs(rev)
1732 p1, p2 = r.parentrevs(rev)
1732 rs = r.rawsize(rev)
1733 rs = r.rawsize(rev)
1733 ts = ts + rs
1734 ts = ts + rs
1734 heads -= set(r.parentrevs(rev))
1735 heads -= set(r.parentrevs(rev))
1735 heads.add(rev)
1736 heads.add(rev)
1736 try:
1737 try:
1737 compression = ts / r.end(rev)
1738 compression = ts / r.end(rev)
1738 except ZeroDivisionError:
1739 except ZeroDivisionError:
1739 compression = 0
1740 compression = 0
1740 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1741 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1741 "%11d %5d %8d\n" %
1742 "%11d %5d %8d\n" %
1742 (rev, p1, p2, r.start(rev), r.end(rev),
1743 (rev, p1, p2, r.start(rev), r.end(rev),
1743 r.start(dbase), r.start(cbase),
1744 r.start(dbase), r.start(cbase),
1744 r.start(p1), r.start(p2),
1745 r.start(p1), r.start(p2),
1745 rs, ts, compression, len(heads), clen))
1746 rs, ts, compression, len(heads), clen))
1746 return 0
1747 return 0
1747
1748
1748 v = r.version
1749 v = r.version
1749 format = v & 0xFFFF
1750 format = v & 0xFFFF
1750 flags = []
1751 flags = []
1751 gdelta = False
1752 gdelta = False
1752 if v & revlog.FLAG_INLINE_DATA:
1753 if v & revlog.FLAG_INLINE_DATA:
1753 flags.append('inline')
1754 flags.append('inline')
1754 if v & revlog.FLAG_GENERALDELTA:
1755 if v & revlog.FLAG_GENERALDELTA:
1755 gdelta = True
1756 gdelta = True
1756 flags.append('generaldelta')
1757 flags.append('generaldelta')
1757 if not flags:
1758 if not flags:
1758 flags = ['(none)']
1759 flags = ['(none)']
1759
1760
1760 nummerges = 0
1761 nummerges = 0
1761 numfull = 0
1762 numfull = 0
1762 numprev = 0
1763 numprev = 0
1763 nump1 = 0
1764 nump1 = 0
1764 nump2 = 0
1765 nump2 = 0
1765 numother = 0
1766 numother = 0
1766 nump1prev = 0
1767 nump1prev = 0
1767 nump2prev = 0
1768 nump2prev = 0
1768 chainlengths = []
1769 chainlengths = []
1769 chainbases = []
1770 chainbases = []
1770 chainspans = []
1771 chainspans = []
1771
1772
1772 datasize = [None, 0, 0]
1773 datasize = [None, 0, 0]
1773 fullsize = [None, 0, 0]
1774 fullsize = [None, 0, 0]
1774 deltasize = [None, 0, 0]
1775 deltasize = [None, 0, 0]
1775 chunktypecounts = {}
1776 chunktypecounts = {}
1776 chunktypesizes = {}
1777 chunktypesizes = {}
1777
1778
1778 def addsize(size, l):
1779 def addsize(size, l):
1779 if l[0] is None or size < l[0]:
1780 if l[0] is None or size < l[0]:
1780 l[0] = size
1781 l[0] = size
1781 if size > l[1]:
1782 if size > l[1]:
1782 l[1] = size
1783 l[1] = size
1783 l[2] += size
1784 l[2] += size
1784
1785
1785 numrevs = len(r)
1786 numrevs = len(r)
1786 for rev in xrange(numrevs):
1787 for rev in xrange(numrevs):
1787 p1, p2 = r.parentrevs(rev)
1788 p1, p2 = r.parentrevs(rev)
1788 delta = r.deltaparent(rev)
1789 delta = r.deltaparent(rev)
1789 if format > 0:
1790 if format > 0:
1790 addsize(r.rawsize(rev), datasize)
1791 addsize(r.rawsize(rev), datasize)
1791 if p2 != nullrev:
1792 if p2 != nullrev:
1792 nummerges += 1
1793 nummerges += 1
1793 size = r.length(rev)
1794 size = r.length(rev)
1794 if delta == nullrev:
1795 if delta == nullrev:
1795 chainlengths.append(0)
1796 chainlengths.append(0)
1796 chainbases.append(r.start(rev))
1797 chainbases.append(r.start(rev))
1797 chainspans.append(size)
1798 chainspans.append(size)
1798 numfull += 1
1799 numfull += 1
1799 addsize(size, fullsize)
1800 addsize(size, fullsize)
1800 else:
1801 else:
1801 chainlengths.append(chainlengths[delta] + 1)
1802 chainlengths.append(chainlengths[delta] + 1)
1802 baseaddr = chainbases[delta]
1803 baseaddr = chainbases[delta]
1803 revaddr = r.start(rev)
1804 revaddr = r.start(rev)
1804 chainbases.append(baseaddr)
1805 chainbases.append(baseaddr)
1805 chainspans.append((revaddr - baseaddr) + size)
1806 chainspans.append((revaddr - baseaddr) + size)
1806 addsize(size, deltasize)
1807 addsize(size, deltasize)
1807 if delta == rev - 1:
1808 if delta == rev - 1:
1808 numprev += 1
1809 numprev += 1
1809 if delta == p1:
1810 if delta == p1:
1810 nump1prev += 1
1811 nump1prev += 1
1811 elif delta == p2:
1812 elif delta == p2:
1812 nump2prev += 1
1813 nump2prev += 1
1813 elif delta == p1:
1814 elif delta == p1:
1814 nump1 += 1
1815 nump1 += 1
1815 elif delta == p2:
1816 elif delta == p2:
1816 nump2 += 1
1817 nump2 += 1
1817 elif delta != nullrev:
1818 elif delta != nullrev:
1818 numother += 1
1819 numother += 1
1819
1820
1820 # Obtain data on the raw chunks in the revlog.
1821 # Obtain data on the raw chunks in the revlog.
1821 segment = r._getsegmentforrevs(rev, rev)[1]
1822 segment = r._getsegmentforrevs(rev, rev)[1]
1822 if segment:
1823 if segment:
1823 chunktype = bytes(segment[0:1])
1824 chunktype = bytes(segment[0:1])
1824 else:
1825 else:
1825 chunktype = 'empty'
1826 chunktype = 'empty'
1826
1827
1827 if chunktype not in chunktypecounts:
1828 if chunktype not in chunktypecounts:
1828 chunktypecounts[chunktype] = 0
1829 chunktypecounts[chunktype] = 0
1829 chunktypesizes[chunktype] = 0
1830 chunktypesizes[chunktype] = 0
1830
1831
1831 chunktypecounts[chunktype] += 1
1832 chunktypecounts[chunktype] += 1
1832 chunktypesizes[chunktype] += size
1833 chunktypesizes[chunktype] += size
1833
1834
1834 # Adjust size min value for empty cases
1835 # Adjust size min value for empty cases
1835 for size in (datasize, fullsize, deltasize):
1836 for size in (datasize, fullsize, deltasize):
1836 if size[0] is None:
1837 if size[0] is None:
1837 size[0] = 0
1838 size[0] = 0
1838
1839
1839 numdeltas = numrevs - numfull
1840 numdeltas = numrevs - numfull
1840 numoprev = numprev - nump1prev - nump2prev
1841 numoprev = numprev - nump1prev - nump2prev
1841 totalrawsize = datasize[2]
1842 totalrawsize = datasize[2]
1842 datasize[2] /= numrevs
1843 datasize[2] /= numrevs
1843 fulltotal = fullsize[2]
1844 fulltotal = fullsize[2]
1844 fullsize[2] /= numfull
1845 fullsize[2] /= numfull
1845 deltatotal = deltasize[2]
1846 deltatotal = deltasize[2]
1846 if numrevs - numfull > 0:
1847 if numrevs - numfull > 0:
1847 deltasize[2] /= numrevs - numfull
1848 deltasize[2] /= numrevs - numfull
1848 totalsize = fulltotal + deltatotal
1849 totalsize = fulltotal + deltatotal
1849 avgchainlen = sum(chainlengths) / numrevs
1850 avgchainlen = sum(chainlengths) / numrevs
1850 maxchainlen = max(chainlengths)
1851 maxchainlen = max(chainlengths)
1851 maxchainspan = max(chainspans)
1852 maxchainspan = max(chainspans)
1852 compratio = 1
1853 compratio = 1
1853 if totalsize:
1854 if totalsize:
1854 compratio = totalrawsize / totalsize
1855 compratio = totalrawsize / totalsize
1855
1856
1856 basedfmtstr = '%%%dd\n'
1857 basedfmtstr = '%%%dd\n'
1857 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1858 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1858
1859
1859 def dfmtstr(max):
1860 def dfmtstr(max):
1860 return basedfmtstr % len(str(max))
1861 return basedfmtstr % len(str(max))
1861 def pcfmtstr(max, padding=0):
1862 def pcfmtstr(max, padding=0):
1862 return basepcfmtstr % (len(str(max)), ' ' * padding)
1863 return basepcfmtstr % (len(str(max)), ' ' * padding)
1863
1864
1864 def pcfmt(value, total):
1865 def pcfmt(value, total):
1865 if total:
1866 if total:
1866 return (value, 100 * float(value) / total)
1867 return (value, 100 * float(value) / total)
1867 else:
1868 else:
1868 return value, 100.0
1869 return value, 100.0
1869
1870
1870 ui.write(('format : %d\n') % format)
1871 ui.write(('format : %d\n') % format)
1871 ui.write(('flags : %s\n') % ', '.join(flags))
1872 ui.write(('flags : %s\n') % ', '.join(flags))
1872
1873
1873 ui.write('\n')
1874 ui.write('\n')
1874 fmt = pcfmtstr(totalsize)
1875 fmt = pcfmtstr(totalsize)
1875 fmt2 = dfmtstr(totalsize)
1876 fmt2 = dfmtstr(totalsize)
1876 ui.write(('revisions : ') + fmt2 % numrevs)
1877 ui.write(('revisions : ') + fmt2 % numrevs)
1877 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1878 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1878 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1879 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1879 ui.write(('revisions : ') + fmt2 % numrevs)
1880 ui.write(('revisions : ') + fmt2 % numrevs)
1880 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1881 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1881 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1882 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1882 ui.write(('revision size : ') + fmt2 % totalsize)
1883 ui.write(('revision size : ') + fmt2 % totalsize)
1883 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1884 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1884 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1885 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1885
1886
1886 def fmtchunktype(chunktype):
1887 def fmtchunktype(chunktype):
1887 if chunktype == 'empty':
1888 if chunktype == 'empty':
1888 return ' %s : ' % chunktype
1889 return ' %s : ' % chunktype
1889 elif chunktype in pycompat.bytestr(string.ascii_letters):
1890 elif chunktype in pycompat.bytestr(string.ascii_letters):
1890 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1891 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1891 else:
1892 else:
1892 return ' 0x%s : ' % hex(chunktype)
1893 return ' 0x%s : ' % hex(chunktype)
1893
1894
1894 ui.write('\n')
1895 ui.write('\n')
1895 ui.write(('chunks : ') + fmt2 % numrevs)
1896 ui.write(('chunks : ') + fmt2 % numrevs)
1896 for chunktype in sorted(chunktypecounts):
1897 for chunktype in sorted(chunktypecounts):
1897 ui.write(fmtchunktype(chunktype))
1898 ui.write(fmtchunktype(chunktype))
1898 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1899 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1899 ui.write(('chunks size : ') + fmt2 % totalsize)
1900 ui.write(('chunks size : ') + fmt2 % totalsize)
1900 for chunktype in sorted(chunktypecounts):
1901 for chunktype in sorted(chunktypecounts):
1901 ui.write(fmtchunktype(chunktype))
1902 ui.write(fmtchunktype(chunktype))
1902 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1903 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1903
1904
1904 ui.write('\n')
1905 ui.write('\n')
1905 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1906 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1906 ui.write(('avg chain length : ') + fmt % avgchainlen)
1907 ui.write(('avg chain length : ') + fmt % avgchainlen)
1907 ui.write(('max chain length : ') + fmt % maxchainlen)
1908 ui.write(('max chain length : ') + fmt % maxchainlen)
1908 ui.write(('max chain reach : ') + fmt % maxchainspan)
1909 ui.write(('max chain reach : ') + fmt % maxchainspan)
1909 ui.write(('compression ratio : ') + fmt % compratio)
1910 ui.write(('compression ratio : ') + fmt % compratio)
1910
1911
1911 if format > 0:
1912 if format > 0:
1912 ui.write('\n')
1913 ui.write('\n')
1913 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1914 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1914 % tuple(datasize))
1915 % tuple(datasize))
1915 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1916 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1916 % tuple(fullsize))
1917 % tuple(fullsize))
1917 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1918 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1918 % tuple(deltasize))
1919 % tuple(deltasize))
1919
1920
1920 if numdeltas > 0:
1921 if numdeltas > 0:
1921 ui.write('\n')
1922 ui.write('\n')
1922 fmt = pcfmtstr(numdeltas)
1923 fmt = pcfmtstr(numdeltas)
1923 fmt2 = pcfmtstr(numdeltas, 4)
1924 fmt2 = pcfmtstr(numdeltas, 4)
1924 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1925 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1925 if numprev > 0:
1926 if numprev > 0:
1926 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1927 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1927 numprev))
1928 numprev))
1928 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1929 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1929 numprev))
1930 numprev))
1930 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1931 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1931 numprev))
1932 numprev))
1932 if gdelta:
1933 if gdelta:
1933 ui.write(('deltas against p1 : ')
1934 ui.write(('deltas against p1 : ')
1934 + fmt % pcfmt(nump1, numdeltas))
1935 + fmt % pcfmt(nump1, numdeltas))
1935 ui.write(('deltas against p2 : ')
1936 ui.write(('deltas against p2 : ')
1936 + fmt % pcfmt(nump2, numdeltas))
1937 + fmt % pcfmt(nump2, numdeltas))
1937 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1938 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1938 numdeltas))
1939 numdeltas))
1939
1940
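For readers who want to sanity-check the percentage and ratio arithmetic in the report above, here is a minimal standalone sketch. The byte counts are invented; only the formulas follow the code:

    def pcfmt(value, total):
        # Same helper as above: a value plus its share of the total in percent.
        if total:
            return (value, 100 * float(value) / total)
        return value, 100.0

    fulltotal = 120000      # bytes stored as full text (invented)
    deltatotal = 30000      # bytes stored as deltas (invented)
    totalsize = fulltotal + deltatotal
    totalrawsize = 900000   # uncompressed size of all revisions (invented)

    print('full   : %d (%5.2f%%)' % pcfmt(fulltotal, totalsize))   # 120000 (80.00%)
    print('deltas : %d (%5.2f%%)' % pcfmt(deltatotal, totalsize))  # 30000 (20.00%)
    print('compression ratio : %d' % (totalrawsize / totalsize))   # 6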
1940 @command('debugrevspec',
1941 @command('debugrevspec',
1941 [('', 'optimize', None,
1942 [('', 'optimize', None,
1942 _('print parsed tree after optimizing (DEPRECATED)')),
1943 _('print parsed tree after optimizing (DEPRECATED)')),
1943 ('', 'show-revs', True, _('print list of result revisions (default)')),
1944 ('', 'show-revs', True, _('print list of result revisions (default)')),
1944 ('s', 'show-set', None, _('print internal representation of result set')),
1945 ('s', 'show-set', None, _('print internal representation of result set')),
1945 ('p', 'show-stage', [],
1946 ('p', 'show-stage', [],
1946 _('print parsed tree at the given stage'), _('NAME')),
1947 _('print parsed tree at the given stage'), _('NAME')),
1947 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1948 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1948 ('', 'verify-optimized', False, _('verify optimized result')),
1949 ('', 'verify-optimized', False, _('verify optimized result')),
1949 ],
1950 ],
1950 ('REVSPEC'))
1951 ('REVSPEC'))
1951 def debugrevspec(ui, repo, expr, **opts):
1952 def debugrevspec(ui, repo, expr, **opts):
1952 """parse and apply a revision specification
1953 """parse and apply a revision specification
1953
1954
1954 Use -p/--show-stage option to print the parsed tree at the given stages.
1955 Use -p/--show-stage option to print the parsed tree at the given stages.
1955 Use -p all to print tree at every stage.
1956 Use -p all to print tree at every stage.
1956
1957
1957 Use --no-show-revs option with -s or -p to print only the set
1958 Use --no-show-revs option with -s or -p to print only the set
1958 representation or the parsed tree respectively.
1959 representation or the parsed tree respectively.
1959
1960
1960 Use --verify-optimized to compare the optimized result with the unoptimized
1961 Use --verify-optimized to compare the optimized result with the unoptimized
1961 one. Returns 1 if the optimized result differs.
1962 one. Returns 1 if the optimized result differs.
1962 """
1963 """
1963 opts = pycompat.byteskwargs(opts)
1964 opts = pycompat.byteskwargs(opts)
1964 aliases = ui.configitems('revsetalias')
1965 aliases = ui.configitems('revsetalias')
1965 stages = [
1966 stages = [
1966 ('parsed', lambda tree: tree),
1967 ('parsed', lambda tree: tree),
1967 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
1968 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
1968 ui.warn)),
1969 ui.warn)),
1969 ('concatenated', revsetlang.foldconcat),
1970 ('concatenated', revsetlang.foldconcat),
1970 ('analyzed', revsetlang.analyze),
1971 ('analyzed', revsetlang.analyze),
1971 ('optimized', revsetlang.optimize),
1972 ('optimized', revsetlang.optimize),
1972 ]
1973 ]
1973 if opts['no_optimized']:
1974 if opts['no_optimized']:
1974 stages = stages[:-1]
1975 stages = stages[:-1]
1975 if opts['verify_optimized'] and opts['no_optimized']:
1976 if opts['verify_optimized'] and opts['no_optimized']:
1976 raise error.Abort(_('cannot use --verify-optimized with '
1977 raise error.Abort(_('cannot use --verify-optimized with '
1977 '--no-optimized'))
1978 '--no-optimized'))
1978 stagenames = set(n for n, f in stages)
1979 stagenames = set(n for n, f in stages)
1979
1980
1980 showalways = set()
1981 showalways = set()
1981 showchanged = set()
1982 showchanged = set()
1982 if ui.verbose and not opts['show_stage']:
1983 if ui.verbose and not opts['show_stage']:
1983 # show parsed tree by --verbose (deprecated)
1984 # show parsed tree by --verbose (deprecated)
1984 showalways.add('parsed')
1985 showalways.add('parsed')
1985 showchanged.update(['expanded', 'concatenated'])
1986 showchanged.update(['expanded', 'concatenated'])
1986 if opts['optimize']:
1987 if opts['optimize']:
1987 showalways.add('optimized')
1988 showalways.add('optimized')
1988 if opts['show_stage'] and opts['optimize']:
1989 if opts['show_stage'] and opts['optimize']:
1989 raise error.Abort(_('cannot use --optimize with --show-stage'))
1990 raise error.Abort(_('cannot use --optimize with --show-stage'))
1990 if opts['show_stage'] == ['all']:
1991 if opts['show_stage'] == ['all']:
1991 showalways.update(stagenames)
1992 showalways.update(stagenames)
1992 else:
1993 else:
1993 for n in opts['show_stage']:
1994 for n in opts['show_stage']:
1994 if n not in stagenames:
1995 if n not in stagenames:
1995 raise error.Abort(_('invalid stage name: %s') % n)
1996 raise error.Abort(_('invalid stage name: %s') % n)
1996 showalways.update(opts['show_stage'])
1997 showalways.update(opts['show_stage'])
1997
1998
1998 treebystage = {}
1999 treebystage = {}
1999 printedtree = None
2000 printedtree = None
2000 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2001 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2001 for n, f in stages:
2002 for n, f in stages:
2002 treebystage[n] = tree = f(tree)
2003 treebystage[n] = tree = f(tree)
2003 if n in showalways or (n in showchanged and tree != printedtree):
2004 if n in showalways or (n in showchanged and tree != printedtree):
2004 if opts['show_stage'] or n != 'parsed':
2005 if opts['show_stage'] or n != 'parsed':
2005 ui.write(("* %s:\n") % n)
2006 ui.write(("* %s:\n") % n)
2006 ui.write(revsetlang.prettyformat(tree), "\n")
2007 ui.write(revsetlang.prettyformat(tree), "\n")
2007 printedtree = tree
2008 printedtree = tree
2008
2009
2009 if opts['verify_optimized']:
2010 if opts['verify_optimized']:
2010 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2011 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2011 brevs = revset.makematcher(treebystage['optimized'])(repo)
2012 brevs = revset.makematcher(treebystage['optimized'])(repo)
2012 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2013 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2013 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2014 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2014 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2015 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2015 arevs = list(arevs)
2016 arevs = list(arevs)
2016 brevs = list(brevs)
2017 brevs = list(brevs)
2017 if arevs == brevs:
2018 if arevs == brevs:
2018 return 0
2019 return 0
2019 ui.write(('--- analyzed\n'), label='diff.file_a')
2020 ui.write(('--- analyzed\n'), label='diff.file_a')
2020 ui.write(('+++ optimized\n'), label='diff.file_b')
2021 ui.write(('+++ optimized\n'), label='diff.file_b')
2021 sm = difflib.SequenceMatcher(None, arevs, brevs)
2022 sm = difflib.SequenceMatcher(None, arevs, brevs)
2022 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2023 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2023 if tag in ('delete', 'replace'):
2024 if tag in ('delete', 'replace'):
2024 for c in arevs[alo:ahi]:
2025 for c in arevs[alo:ahi]:
2025 ui.write('-%s\n' % c, label='diff.deleted')
2026 ui.write('-%s\n' % c, label='diff.deleted')
2026 if tag in ('insert', 'replace'):
2027 if tag in ('insert', 'replace'):
2027 for c in brevs[blo:bhi]:
2028 for c in brevs[blo:bhi]:
2028 ui.write('+%s\n' % c, label='diff.inserted')
2029 ui.write('+%s\n' % c, label='diff.inserted')
2029 if tag == 'equal':
2030 if tag == 'equal':
2030 for c in arevs[alo:ahi]:
2031 for c in arevs[alo:ahi]:
2031 ui.write(' %s\n' % c)
2032 ui.write(' %s\n' % c)
2032 return 1
2033 return 1
2033
2034
2034 func = revset.makematcher(tree)
2035 func = revset.makematcher(tree)
2035 revs = func(repo)
2036 revs = func(repo)
2036 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2037 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2037 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2038 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2038 if not opts['show_revs']:
2039 if not opts['show_revs']:
2039 return
2040 return
2040 for c in revs:
2041 for c in revs:
2041 ui.write("%s\n" % c)
2042 ui.write("%s\n" % c)
2042
2043
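The same stage pipeline can be driven by hand from an extension or a debug script. This is a hedged sketch only (the helper name is made up); it chains the `revsetlang` functions listed in the stage table above and assumes a live `ui` and `repo`:

    from mercurial import revset, revsetlang

    def showstages(ui, repo, expr):
        # parsed -> expanded -> concatenated -> analyzed -> optimized,
        # printing the tree after each transformation.
        aliases = ui.configitems('revsetalias')
        tree = revsetlang.parse(expr, lookup=repo.__contains__)
        for name, fn in [
            ('expanded', lambda t: revsetlang.expandaliases(t, aliases, ui.warn)),
            ('concatenated', revsetlang.foldconcat),
            ('analyzed', revsetlang.analyze),
            ('optimized', revsetlang.optimize),
        ]:
            tree = fn(tree)
            ui.write('* %s:\n%s\n' % (name, revsetlang.prettyformat(tree)))
        return revset.makematcher(tree)(repo)   # smartset of matching revisions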
2043 @command('debugsetparents', [], _('REV1 [REV2]'))
2044 @command('debugsetparents', [], _('REV1 [REV2]'))
2044 def debugsetparents(ui, repo, rev1, rev2=None):
2045 def debugsetparents(ui, repo, rev1, rev2=None):
2045 """manually set the parents of the current working directory
2046 """manually set the parents of the current working directory
2046
2047
2047 This is useful for writing repository conversion tools, but should
2048 This is useful for writing repository conversion tools, but should
2048 be used with care. For example, neither the working directory nor the
2049 be used with care. For example, neither the working directory nor the
2049 dirstate is updated, so file status may be incorrect after running this
2050 dirstate is updated, so file status may be incorrect after running this
2050 command.
2051 command.
2051
2052
2052 Returns 0 on success.
2053 Returns 0 on success.
2053 """
2054 """
2054
2055
2055 r1 = scmutil.revsingle(repo, rev1).node()
2056 r1 = scmutil.revsingle(repo, rev1).node()
2056 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2057 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2057
2058
2058 with repo.wlock():
2059 with repo.wlock():
2059 repo.setparents(r1, r2)
2060 repo.setparents(r1, r2)
2060
2061
2061 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2062 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2062 def debugssl(ui, repo, source=None, **opts):
2063 def debugssl(ui, repo, source=None, **opts):
2063 '''test a secure connection to a server
2064 '''test a secure connection to a server
2064
2065
2065 This builds the certificate chain for the server on Windows, installing the
2066 This builds the certificate chain for the server on Windows, installing the
2066 missing intermediates and trusted root via Windows Update if necessary. It
2067 missing intermediates and trusted root via Windows Update if necessary. It
2067 does nothing on other platforms.
2068 does nothing on other platforms.
2068
2069
2069 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2070 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2070 that server is used. See :hg:`help urls` for more information.
2071 that server is used. See :hg:`help urls` for more information.
2071
2072
2072 If the update succeeds, retry the original operation. Otherwise, the cause
2073 If the update succeeds, retry the original operation. Otherwise, the cause
2073 of the SSL error is likely another issue.
2074 of the SSL error is likely another issue.
2074 '''
2075 '''
2075 if pycompat.osname != 'nt':
2076 if pycompat.osname != 'nt':
2076 raise error.Abort(_('Certificate chain building is only possible on '
2077 raise error.Abort(_('Certificate chain building is only possible on '
2077 'Windows'))
2078 'Windows'))
2078
2079
2079 if not source:
2080 if not source:
2080 source = "default"
2081 source = "default"
2081 elif not repo:
2082 elif not repo:
2082 raise error.Abort(_("there is no Mercurial repository here, and no "
2083 raise error.Abort(_("there is no Mercurial repository here, and no "
2083 "server specified"))
2084 "server specified"))
2084
2085
2085 source, branches = hg.parseurl(ui.expandpath(source))
2086 source, branches = hg.parseurl(ui.expandpath(source))
2086 url = util.url(source)
2087 url = util.url(source)
2087 addr = None
2088 addr = None
2088
2089
2089 if url.scheme == 'https':
2090 if url.scheme == 'https':
2090 addr = (url.host, url.port or 443)
2091 addr = (url.host, url.port or 443)
2091 elif url.scheme == 'ssh':
2092 elif url.scheme == 'ssh':
2092 addr = (url.host, url.port or 22)
2093 addr = (url.host, url.port or 22)
2093 else:
2094 else:
2094 raise error.Abort(_("Only https and ssh connections are supported"))
2095 raise error.Abort(_("Only https and ssh connections are supported"))
2095
2096
2096 from . import win32
2097 from . import win32
2097
2098
2098 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2099 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2099 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2100 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2100
2101
2101 try:
2102 try:
2102 s.connect(addr)
2103 s.connect(addr)
2103 cert = s.getpeercert(True)
2104 cert = s.getpeercert(True)
2104
2105
2105 ui.status(_('Checking the certificate chain for %s.\n') % url.host)
2106 ui.status(_('Checking the certificate chain for %s.\n') % url.host)
2106
2107
2107 complete = win32.checkcertificatechain(cert, build=False)
2108 complete = win32.checkcertificatechain(cert, build=False)
2108
2109
2109 if not complete:
2110 if not complete:
2110 ui.status(_('The certificate chain is incomplete. Updating... '))
2111 ui.status(_('The certificate chain is incomplete. Updating... '))
2111
2112
2112 if not win32.checkcertificatechain(cert):
2113 if not win32.checkcertificatechain(cert):
2113 ui.status(_('Failed.\n'))
2114 ui.status(_('Failed.\n'))
2114 else:
2115 else:
2115 ui.status(_('Done.\n'))
2116 ui.status(_('Done.\n'))
2116 else:
2117 else:
2117 ui.status(_('The full certificate chain is available.\n'))
2118 ui.status(_('The full certificate chain is available.\n'))
2118 finally:
2119 finally:
2119 s.close()
2120 s.close()
2120
2121
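As a small aside, the scheme-to-port mapping used above can be factored into a helper. This is only an illustrative sketch (the function name is hypothetical) built on the `util.url` attributes already used by the command:

    from mercurial import util

    def ssladdr(source):
        # Map an hg URL onto the (host, port) pair debugssl connects to.
        u = util.url(source)
        if u.scheme == 'https':
            return (u.host, u.port or 443)
        if u.scheme == 'ssh':
            return (u.host, u.port or 22)
        raise ValueError('only https and ssh connections are supported')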
2121 @command('debugsub',
2122 @command('debugsub',
2122 [('r', 'rev', '',
2123 [('r', 'rev', '',
2123 _('revision to check'), _('REV'))],
2124 _('revision to check'), _('REV'))],
2124 _('[-r REV] [REV]'))
2125 _('[-r REV] [REV]'))
2125 def debugsub(ui, repo, rev=None):
2126 def debugsub(ui, repo, rev=None):
2126 ctx = scmutil.revsingle(repo, rev, None)
2127 ctx = scmutil.revsingle(repo, rev, None)
2127 for k, v in sorted(ctx.substate.items()):
2128 for k, v in sorted(ctx.substate.items()):
2128 ui.write(('path %s\n') % k)
2129 ui.write(('path %s\n') % k)
2129 ui.write((' source %s\n') % v[0])
2130 ui.write((' source %s\n') % v[0])
2130 ui.write((' revision %s\n') % v[1])
2131 ui.write((' revision %s\n') % v[1])
2131
2132
2132 @command('debugsuccessorssets',
2133 @command('debugsuccessorssets',
2133 [('', 'closest', False, _('return closest successors sets only'))],
2134 [('', 'closest', False, _('return closest successors sets only'))],
2134 _('[REV]'))
2135 _('[REV]'))
2135 def debugsuccessorssets(ui, repo, *revs, **opts):
2136 def debugsuccessorssets(ui, repo, *revs, **opts):
2136 """show set of successors for revision
2137 """show set of successors for revision
2137
2138
2138 A successors set of changeset A is a consistent group of revisions that
2139 A successors set of changeset A is a consistent group of revisions that
2139 succeed A. It contains non-obsolete changesets only unless the closest
2140 succeed A. It contains non-obsolete changesets only unless the closest
2140 successors set is requested (--closest).
2141 successors set is requested (--closest).
2141
2142
2142 In most cases a changeset A has a single successors set containing a single
2143 In most cases a changeset A has a single successors set containing a single
2143 successor (changeset A replaced by A').
2144 successor (changeset A replaced by A').
2144
2145
2145 A changeset that is made obsolete with no successors is called "pruned".
2146 A changeset that is made obsolete with no successors is called "pruned".
2146 Such changesets have no successors sets at all.
2147 Such changesets have no successors sets at all.
2147
2148
2148 A changeset that has been "split" will have a successors set containing
2149 A changeset that has been "split" will have a successors set containing
2149 more than one successor.
2150 more than one successor.
2150
2151
2151 A changeset that has been rewritten in multiple different ways is called
2152 A changeset that has been rewritten in multiple different ways is called
2152 "divergent". Such changesets have multiple successor sets (each of which
2153 "divergent". Such changesets have multiple successor sets (each of which
2153 may also be split, i.e. have multiple successors).
2154 may also be split, i.e. have multiple successors).
2154
2155
2155 Results are displayed as follows::
2156 Results are displayed as follows::
2156
2157
2157 <rev1>
2158 <rev1>
2158 <successors-1A>
2159 <successors-1A>
2159 <rev2>
2160 <rev2>
2160 <successors-2A>
2161 <successors-2A>
2161 <successors-2B1> <successors-2B2> <successors-2B3>
2162 <successors-2B1> <successors-2B2> <successors-2B3>
2162
2163
2163 Here rev2 has two possible (i.e. divergent) successors sets. The first
2164 Here rev2 has two possible (i.e. divergent) successors sets. The first
2164 holds one element, whereas the second holds three (i.e. the changeset has
2165 holds one element, whereas the second holds three (i.e. the changeset has
2165 been split).
2166 been split).
2166 """
2167 """
2167 # passed to successorssets caching computation from one call to another
2168 # passed to successorssets caching computation from one call to another
2168 cache = {}
2169 cache = {}
2169 ctx2str = str
2170 ctx2str = str
2170 node2str = short
2171 node2str = short
2171 if ui.debug():
2172 if ui.debug():
2172 def ctx2str(ctx):
2173 def ctx2str(ctx):
2173 return ctx.hex()
2174 return ctx.hex()
2174 node2str = hex
2175 node2str = hex
2175 for rev in scmutil.revrange(repo, revs):
2176 for rev in scmutil.revrange(repo, revs):
2176 ctx = repo[rev]
2177 ctx = repo[rev]
2177 ui.write('%s\n'% ctx2str(ctx))
2178 ui.write('%s\n'% ctx2str(ctx))
2178 for succsset in obsutil.successorssets(repo, ctx.node(),
2179 for succsset in obsutil.successorssets(repo, ctx.node(),
2179 closest=opts['closest'],
2180 closest=opts['closest'],
2180 cache=cache):
2181 cache=cache):
2181 if succsset:
2182 if succsset:
2182 ui.write(' ')
2183 ui.write(' ')
2183 ui.write(node2str(succsset[0]))
2184 ui.write(node2str(succsset[0]))
2184 for node in succsset[1:]:
2185 for node in succsset[1:]:
2185 ui.write(' ')
2186 ui.write(' ')
2186 ui.write(node2str(node))
2187 ui.write(node2str(node))
2187 ui.write('\n')
2188 ui.write('\n')
2188
2189
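A minimal programmatic sketch of the same lookup, for scripts that want successors sets without going through the command (the function name is hypothetical; the `obsutil.successorssets` call and the `cache` reuse mirror the code above):

    from mercurial import obsutil
    from mercurial.node import short

    def printsuccessorssets(ui, repo, revs):
        cache = {}   # shared across calls, as in the command above
        for rev in revs:
            ctx = repo[rev]
            ui.write('%s\n' % ctx)
            for succsset in obsutil.successorssets(repo, ctx.node(), cache=cache):
                ui.write('    %s\n' % ' '.join(short(n) for n in succsset))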
2189 @command('debugtemplate',
2190 @command('debugtemplate',
2190 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2191 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2191 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2192 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2192 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2193 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2193 optionalrepo=True)
2194 optionalrepo=True)
2194 def debugtemplate(ui, repo, tmpl, **opts):
2195 def debugtemplate(ui, repo, tmpl, **opts):
2195 """parse and apply a template
2196 """parse and apply a template
2196
2197
2197 If -r/--rev is given, the template is processed as a log template and
2198 If -r/--rev is given, the template is processed as a log template and
2198 applied to the given changesets. Otherwise, it is processed as a generic
2199 applied to the given changesets. Otherwise, it is processed as a generic
2199 template.
2200 template.
2200
2201
2201 Use --verbose to print the parsed tree.
2202 Use --verbose to print the parsed tree.
2202 """
2203 """
2203 revs = None
2204 revs = None
2204 if opts[r'rev']:
2205 if opts[r'rev']:
2205 if repo is None:
2206 if repo is None:
2206 raise error.RepoError(_('there is no Mercurial repository here '
2207 raise error.RepoError(_('there is no Mercurial repository here '
2207 '(.hg not found)'))
2208 '(.hg not found)'))
2208 revs = scmutil.revrange(repo, opts[r'rev'])
2209 revs = scmutil.revrange(repo, opts[r'rev'])
2209
2210
2210 props = {}
2211 props = {}
2211 for d in opts[r'define']:
2212 for d in opts[r'define']:
2212 try:
2213 try:
2213 k, v = (e.strip() for e in d.split('=', 1))
2214 k, v = (e.strip() for e in d.split('=', 1))
2214 if not k or k == 'ui':
2215 if not k or k == 'ui':
2215 raise ValueError
2216 raise ValueError
2216 props[k] = v
2217 props[k] = v
2217 except ValueError:
2218 except ValueError:
2218 raise error.Abort(_('malformed keyword definition: %s') % d)
2219 raise error.Abort(_('malformed keyword definition: %s') % d)
2219
2220
2220 if ui.verbose:
2221 if ui.verbose:
2221 aliases = ui.configitems('templatealias')
2222 aliases = ui.configitems('templatealias')
2222 tree = templater.parse(tmpl)
2223 tree = templater.parse(tmpl)
2223 ui.note(templater.prettyformat(tree), '\n')
2224 ui.note(templater.prettyformat(tree), '\n')
2224 newtree = templater.expandaliases(tree, aliases)
2225 newtree = templater.expandaliases(tree, aliases)
2225 if newtree != tree:
2226 if newtree != tree:
2226 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2227 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2227
2228
2228 if revs is None:
2229 if revs is None:
2229 t = formatter.maketemplater(ui, tmpl)
2230 t = formatter.maketemplater(ui, tmpl)
2230 props['ui'] = ui
2231 props['ui'] = ui
2231 ui.write(t.render(props))
2232 ui.write(t.render(props))
2232 else:
2233 else:
2233 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2234 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2234 for r in revs:
2235 for r in revs:
2235 displayer.show(repo[r], **pycompat.strkwargs(props))
2236 displayer.show(repo[r], **pycompat.strkwargs(props))
2236 displayer.close()
2237 displayer.close()
2237
2238
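To illustrate the generic (no --rev) branch, here is a sketch of rendering a template against ad-hoc KEY=VALUE properties. The helper name and sample values are invented; `formatter.maketemplater` and `render` are the calls used above:

    from mercurial import formatter

    def renderonce(ui, tmpl, definitions):
        # Parse KEY=VALUE pairs into template properties and render once.
        props = {'ui': ui}
        for d in definitions:                      # e.g. ['greeting=hello']
            k, v = (e.strip() for e in d.split('=', 1))
            props[k] = v
        t = formatter.maketemplater(ui, tmpl)
        return t.render(props)

    # e.g. renderonce(ui, '{greeting}, world\n', ['greeting=hello'])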
2238 @command('debugupdatecaches', [])
2239 @command('debugupdatecaches', [])
2239 def debugupdatecaches(ui, repo, *pats, **opts):
2240 def debugupdatecaches(ui, repo, *pats, **opts):
2240 """warm all known caches in the repository"""
2241 """warm all known caches in the repository"""
2241 with repo.wlock(), repo.lock():
2242 with repo.wlock(), repo.lock():
2242 repo.updatecaches()
2243 repo.updatecaches()
2243
2244
2244 @command('debugupgraderepo', [
2245 @command('debugupgraderepo', [
2245 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2246 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2246 ('', 'run', False, _('performs an upgrade')),
2247 ('', 'run', False, _('performs an upgrade')),
2247 ])
2248 ])
2248 def debugupgraderepo(ui, repo, run=False, optimize=None):
2249 def debugupgraderepo(ui, repo, run=False, optimize=None):
2249 """upgrade a repository to use different features
2250 """upgrade a repository to use different features
2250
2251
2251 If no arguments are specified, the repository is evaluated for upgrade
2252 If no arguments are specified, the repository is evaluated for upgrade
2252 and a list of problems and potential optimizations is printed.
2253 and a list of problems and potential optimizations is printed.
2253
2254
2254 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2255 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2255 can be influenced via additional arguments. More details will be provided
2256 can be influenced via additional arguments. More details will be provided
2256 by the command output when run without ``--run``.
2257 by the command output when run without ``--run``.
2257
2258
2258 During the upgrade, the repository will be locked and no writes will be
2259 During the upgrade, the repository will be locked and no writes will be
2259 allowed.
2260 allowed.
2260
2261
2261 At the end of the upgrade, the repository may not be readable while new
2262 At the end of the upgrade, the repository may not be readable while new
2262 repository data is swapped in. This window will be as long as it takes to
2263 repository data is swapped in. This window will be as long as it takes to
2263 rename some directories inside the ``.hg`` directory. On most machines, this
2264 rename some directories inside the ``.hg`` directory. On most machines, this
2264 should complete almost instantaneously and the chances of a consumer being
2265 should complete almost instantaneously and the chances of a consumer being
2265 unable to access the repository should be low.
2266 unable to access the repository should be low.
2266 """
2267 """
2267 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2268 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2268
2269
2269 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2270 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2270 inferrepo=True)
2271 inferrepo=True)
2271 def debugwalk(ui, repo, *pats, **opts):
2272 def debugwalk(ui, repo, *pats, **opts):
2272 """show how files match on given patterns"""
2273 """show how files match on given patterns"""
2273 opts = pycompat.byteskwargs(opts)
2274 opts = pycompat.byteskwargs(opts)
2274 m = scmutil.match(repo[None], pats, opts)
2275 m = scmutil.match(repo[None], pats, opts)
2275 ui.write(('matcher: %r\n' % m))
2276 ui.write(('matcher: %r\n' % m))
2276 items = list(repo[None].walk(m))
2277 items = list(repo[None].walk(m))
2277 if not items:
2278 if not items:
2278 return
2279 return
2279 f = lambda fn: fn
2280 f = lambda fn: fn
2280 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2281 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2281 f = lambda fn: util.normpath(fn)
2282 f = lambda fn: util.normpath(fn)
2282 fmt = 'f %%-%ds %%-%ds %%s' % (
2283 fmt = 'f %%-%ds %%-%ds %%s' % (
2283 max([len(abs) for abs in items]),
2284 max([len(abs) for abs in items]),
2284 max([len(m.rel(abs)) for abs in items]))
2285 max([len(m.rel(abs)) for abs in items]))
2285 for abs in items:
2286 for abs in items:
2286 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2287 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2287 ui.write("%s\n" % line.rstrip())
2288 ui.write("%s\n" % line.rstrip())
2288
2289
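The column layout produced above boils down to one dynamically sized format string; a small standalone sketch (invented file names, hypothetical helper) shows the idea:

    def walkreport(names, relnames, exactflags):
        # Pad the absolute and relative columns to the widest entry, as debugwalk does.
        fmt = 'f %%-%ds %%-%ds %%s' % (max(len(n) for n in names),
                                       max(len(r) for r in relnames))
        return [(fmt % (n, r, 'exact' if e else '')).rstrip()
                for n, r, e in zip(names, relnames, exactflags)]

    # walkreport(['dir/a.o', 'b.c'], ['a.o', '../b.c'], [True, False])
    # gives two aligned rows, the first ending in 'exact'.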
2289 @command('debugwireargs',
2290 @command('debugwireargs',
2290 [('', 'three', '', 'three'),
2291 [('', 'three', '', 'three'),
2291 ('', 'four', '', 'four'),
2292 ('', 'four', '', 'four'),
2292 ('', 'five', '', 'five'),
2293 ('', 'five', '', 'five'),
2293 ] + cmdutil.remoteopts,
2294 ] + cmdutil.remoteopts,
2294 _('REPO [OPTIONS]... [ONE [TWO]]'),
2295 _('REPO [OPTIONS]... [ONE [TWO]]'),
2295 norepo=True)
2296 norepo=True)
2296 def debugwireargs(ui, repopath, *vals, **opts):
2297 def debugwireargs(ui, repopath, *vals, **opts):
2297 opts = pycompat.byteskwargs(opts)
2298 opts = pycompat.byteskwargs(opts)
2298 repo = hg.peer(ui, opts, repopath)
2299 repo = hg.peer(ui, opts, repopath)
2299 for opt in cmdutil.remoteopts:
2300 for opt in cmdutil.remoteopts:
2300 del opts[opt[1]]
2301 del opts[opt[1]]
2301 args = {}
2302 args = {}
2302 for k, v in opts.iteritems():
2303 for k, v in opts.iteritems():
2303 if v:
2304 if v:
2304 args[k] = v
2305 args[k] = v
2305 # run twice to check that we don't mess up the stream for the next command
2306 # run twice to check that we don't mess up the stream for the next command
2306 res1 = repo.debugwireargs(*vals, **args)
2307 res1 = repo.debugwireargs(*vals, **args)
2307 res2 = repo.debugwireargs(*vals, **args)
2308 res2 = repo.debugwireargs(*vals, **args)
2308 ui.write("%s\n" % res1)
2309 ui.write("%s\n" % res1)
2309 if res1 != res2:
2310 if res1 != res2:
2310 ui.warn("%s\n" % res2)
2311 ui.warn("%s\n" % res2)
@@ -1,310 +1,339 b''
1 $ hg init ignorerepo
1 $ hg init ignorerepo
2 $ cd ignorerepo
2 $ cd ignorerepo
3
3
4 debugignore with no hgignore should be deterministic:
4 debugignore with no hgignore should be deterministic:
5 $ hg debugignore
5 $ hg debugignore
6 <nevermatcher>
6 <nevermatcher>
7
7
8 Issue562: .hgignore requires newline at end:
8 Issue562: .hgignore requires newline at end:
9
9
10 $ touch foo
10 $ touch foo
11 $ touch bar
11 $ touch bar
12 $ touch baz
12 $ touch baz
13 $ cat > makeignore.py <<EOF
13 $ cat > makeignore.py <<EOF
14 > f = open(".hgignore", "w")
14 > f = open(".hgignore", "w")
15 > f.write("ignore\n")
15 > f.write("ignore\n")
16 > f.write("foo\n")
16 > f.write("foo\n")
17 > # No EOL here
17 > # No EOL here
18 > f.write("bar")
18 > f.write("bar")
19 > f.close()
19 > f.close()
20 > EOF
20 > EOF
21
21
22 $ $PYTHON makeignore.py
22 $ $PYTHON makeignore.py
23
23
24 Should display baz only:
24 Should display baz only:
25
25
26 $ hg status
26 $ hg status
27 ? baz
27 ? baz
28
28
29 $ rm foo bar baz .hgignore makeignore.py
29 $ rm foo bar baz .hgignore makeignore.py
30
30
31 $ touch a.o
31 $ touch a.o
32 $ touch a.c
32 $ touch a.c
33 $ touch syntax
33 $ touch syntax
34 $ mkdir dir
34 $ mkdir dir
35 $ touch dir/a.o
35 $ touch dir/a.o
36 $ touch dir/b.o
36 $ touch dir/b.o
37 $ touch dir/c.o
37 $ touch dir/c.o
38
38
39 $ hg add dir/a.o
39 $ hg add dir/a.o
40 $ hg commit -m 0
40 $ hg commit -m 0
41 $ hg add dir/b.o
41 $ hg add dir/b.o
42
42
43 $ hg status
43 $ hg status
44 A dir/b.o
44 A dir/b.o
45 ? a.c
45 ? a.c
46 ? a.o
46 ? a.o
47 ? dir/c.o
47 ? dir/c.o
48 ? syntax
48 ? syntax
49
49
50 $ echo "*.o" > .hgignore
50 $ echo "*.o" > .hgignore
51 $ hg status
51 $ hg status
52 abort: $TESTTMP/ignorerepo/.hgignore: invalid pattern (relre): *.o (glob)
52 abort: $TESTTMP/ignorerepo/.hgignore: invalid pattern (relre): *.o (glob)
53 [255]
53 [255]
54
54
55 Ensure given files are relative to cwd
56
57 $ echo "dir/.*\.o" > .hgignore
58 $ hg status -i
59 I dir/c.o
60
61 $ hg debugignore dir/c.o dir/missing.o
62 dir/c.o is ignored (glob)
63 (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
64 dir/missing.o is ignored (glob)
65 (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
66 $ cd dir
67 $ hg debugignore c.o missing.o
68 c.o is ignored
69 (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
70 missing.o is ignored
71 (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
72
73 For icasefs, inexact matches also work, except for missing files
74
75 #if icasefs
76 $ hg debugignore c.O missing.O
77 c.o is ignored
78 (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
79 missing.O is not ignored
80 #endif
81
82 $ cd ..
83
55 $ echo ".*\.o" > .hgignore
84 $ echo ".*\.o" > .hgignore
56 $ hg status
85 $ hg status
57 A dir/b.o
86 A dir/b.o
58 ? .hgignore
87 ? .hgignore
59 ? a.c
88 ? a.c
60 ? syntax
89 ? syntax
61
90
62 Ensure that comments work:
91 Ensure that comments work:
63
92
64 $ touch 'foo#bar' 'quux#'
93 $ touch 'foo#bar' 'quux#'
65 #if no-windows
94 #if no-windows
66 $ touch 'baz\#wat'
95 $ touch 'baz\#wat'
67 #endif
96 #endif
68 $ cat <<'EOF' >> .hgignore
97 $ cat <<'EOF' >> .hgignore
69 > # full-line comment
98 > # full-line comment
70 > # whitespace-only comment line
99 > # whitespace-only comment line
71 > syntax# pattern, no whitespace, then comment
100 > syntax# pattern, no whitespace, then comment
72 > a.c # pattern, then whitespace, then comment
101 > a.c # pattern, then whitespace, then comment
73 > baz\\# # escaped comment character
102 > baz\\# # escaped comment character
74 > foo\#b # escaped comment character
103 > foo\#b # escaped comment character
75 > quux\## escaped comment character at end of name
104 > quux\## escaped comment character at end of name
76 > EOF
105 > EOF
77 $ hg status
106 $ hg status
78 A dir/b.o
107 A dir/b.o
79 ? .hgignore
108 ? .hgignore
80 $ rm 'foo#bar' 'quux#'
109 $ rm 'foo#bar' 'quux#'
81 #if no-windows
110 #if no-windows
82 $ rm 'baz\#wat'
111 $ rm 'baz\#wat'
83 #endif
112 #endif
84
113
85 Check that '^\.' does not ignore the root directory:
114 Check that '^\.' does not ignore the root directory:
86
115
87 $ echo "^\." > .hgignore
116 $ echo "^\." > .hgignore
88 $ hg status
117 $ hg status
89 A dir/b.o
118 A dir/b.o
90 ? a.c
119 ? a.c
91 ? a.o
120 ? a.o
92 ? dir/c.o
121 ? dir/c.o
93 ? syntax
122 ? syntax
94
123
95 Test that patterns from ui.ignore options are read:
124 Test that patterns from ui.ignore options are read:
96
125
97 $ echo > .hgignore
126 $ echo > .hgignore
98 $ cat >> $HGRCPATH << EOF
127 $ cat >> $HGRCPATH << EOF
99 > [ui]
128 > [ui]
100 > ignore.other = $TESTTMP/ignorerepo/.hg/testhgignore
129 > ignore.other = $TESTTMP/ignorerepo/.hg/testhgignore
101 > EOF
130 > EOF
102 $ echo "glob:**.o" > .hg/testhgignore
131 $ echo "glob:**.o" > .hg/testhgignore
103 $ hg status
132 $ hg status
104 A dir/b.o
133 A dir/b.o
105 ? .hgignore
134 ? .hgignore
106 ? a.c
135 ? a.c
107 ? syntax
136 ? syntax
108
137
109 empty out testhgignore
138 empty out testhgignore
110 $ echo > .hg/testhgignore
139 $ echo > .hg/testhgignore
111
140
112 Test relative ignore path (issue4473):
141 Test relative ignore path (issue4473):
113
142
114 $ cat >> $HGRCPATH << EOF
143 $ cat >> $HGRCPATH << EOF
115 > [ui]
144 > [ui]
116 > ignore.relative = .hg/testhgignorerel
145 > ignore.relative = .hg/testhgignorerel
117 > EOF
146 > EOF
118 $ echo "glob:*.o" > .hg/testhgignorerel
147 $ echo "glob:*.o" > .hg/testhgignorerel
119 $ cd dir
148 $ cd dir
120 $ hg status
149 $ hg status
121 A dir/b.o
150 A dir/b.o
122 ? .hgignore
151 ? .hgignore
123 ? a.c
152 ? a.c
124 ? syntax
153 ? syntax
125
154
126 $ cd ..
155 $ cd ..
127 $ echo > .hg/testhgignorerel
156 $ echo > .hg/testhgignorerel
128 $ echo "syntax: glob" > .hgignore
157 $ echo "syntax: glob" > .hgignore
129 $ echo "re:.*\.o" >> .hgignore
158 $ echo "re:.*\.o" >> .hgignore
130 $ hg status
159 $ hg status
131 A dir/b.o
160 A dir/b.o
132 ? .hgignore
161 ? .hgignore
133 ? a.c
162 ? a.c
134 ? syntax
163 ? syntax
135
164
136 $ echo "syntax: invalid" > .hgignore
165 $ echo "syntax: invalid" > .hgignore
137 $ hg status
166 $ hg status
138 $TESTTMP/ignorerepo/.hgignore: ignoring invalid syntax 'invalid' (glob)
167 $TESTTMP/ignorerepo/.hgignore: ignoring invalid syntax 'invalid' (glob)
139 A dir/b.o
168 A dir/b.o
140 ? .hgignore
169 ? .hgignore
141 ? a.c
170 ? a.c
142 ? a.o
171 ? a.o
143 ? dir/c.o
172 ? dir/c.o
144 ? syntax
173 ? syntax
145
174
146 $ echo "syntax: glob" > .hgignore
175 $ echo "syntax: glob" > .hgignore
147 $ echo "*.o" >> .hgignore
176 $ echo "*.o" >> .hgignore
148 $ hg status
177 $ hg status
149 A dir/b.o
178 A dir/b.o
150 ? .hgignore
179 ? .hgignore
151 ? a.c
180 ? a.c
152 ? syntax
181 ? syntax
153
182
154 $ echo "relglob:syntax*" > .hgignore
183 $ echo "relglob:syntax*" > .hgignore
155 $ hg status
184 $ hg status
156 A dir/b.o
185 A dir/b.o
157 ? .hgignore
186 ? .hgignore
158 ? a.c
187 ? a.c
159 ? a.o
188 ? a.o
160 ? dir/c.o
189 ? dir/c.o
161
190
162 $ echo "relglob:*" > .hgignore
191 $ echo "relglob:*" > .hgignore
163 $ hg status
192 $ hg status
164 A dir/b.o
193 A dir/b.o
165
194
166 $ cd dir
195 $ cd dir
167 $ hg status .
196 $ hg status .
168 A b.o
197 A b.o
169
198
170 $ hg debugignore
199 $ hg debugignore
171 <includematcher includes='(?:(?:|.*/)[^/]*(?:/|$))'>
200 <includematcher includes='(?:(?:|.*/)[^/]*(?:/|$))'>
172
201
173 $ hg debugignore b.o
202 $ hg debugignore b.o
174 b.o is ignored
203 b.o is ignored
175 (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: '*') (glob)
204 (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: '*') (glob)
176
205
177 $ cd ..
206 $ cd ..
178
207
179 Check patterns that match only the directory
208 Check patterns that match only the directory
180
209
181 "(fsmonitor !)" below assumes that fsmonitor is enabled with
210 "(fsmonitor !)" below assumes that fsmonitor is enabled with
182 "walk_on_invalidate = false" (default), which doesn't involve
211 "walk_on_invalidate = false" (default), which doesn't involve
183 re-walking whole repository at detection of .hgignore change.
212 re-walking whole repository at detection of .hgignore change.
184
213
185 $ echo "^dir\$" > .hgignore
214 $ echo "^dir\$" > .hgignore
186 $ hg status
215 $ hg status
187 A dir/b.o
216 A dir/b.o
188 ? .hgignore
217 ? .hgignore
189 ? a.c
218 ? a.c
190 ? a.o
219 ? a.o
191 ? dir/c.o (fsmonitor !)
220 ? dir/c.o (fsmonitor !)
192 ? syntax
221 ? syntax
193
222
194 Check recursive glob pattern matches no directories (dir/**/c.o matches dir/c.o)
223 Check recursive glob pattern matches no directories (dir/**/c.o matches dir/c.o)
195
224
196 $ echo "syntax: glob" > .hgignore
225 $ echo "syntax: glob" > .hgignore
197 $ echo "dir/**/c.o" >> .hgignore
226 $ echo "dir/**/c.o" >> .hgignore
198 $ touch dir/c.o
227 $ touch dir/c.o
199 $ mkdir dir/subdir
228 $ mkdir dir/subdir
200 $ touch dir/subdir/c.o
229 $ touch dir/subdir/c.o
201 $ hg status
230 $ hg status
202 A dir/b.o
231 A dir/b.o
203 ? .hgignore
232 ? .hgignore
204 ? a.c
233 ? a.c
205 ? a.o
234 ? a.o
206 ? syntax
235 ? syntax
207 $ hg debugignore a.c
236 $ hg debugignore a.c
208 a.c is not ignored
237 a.c is not ignored
209 $ hg debugignore dir/c.o
238 $ hg debugignore dir/c.o
210 dir/c.o is ignored
239 dir/c.o is ignored (glob)
211 (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 2: 'dir/**/c.o') (glob)
240 (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 2: 'dir/**/c.o') (glob)
212
241
213 Check using 'include:' in ignore file
242 Check using 'include:' in ignore file
214
243
215 $ hg purge --all --config extensions.purge=
244 $ hg purge --all --config extensions.purge=
216 $ touch foo.included
245 $ touch foo.included
217
246
218 $ echo ".*.included" > otherignore
247 $ echo ".*.included" > otherignore
219 $ hg status -I "include:otherignore"
248 $ hg status -I "include:otherignore"
220 ? foo.included
249 ? foo.included
221
250
222 $ echo "include:otherignore" >> .hgignore
251 $ echo "include:otherignore" >> .hgignore
223 $ hg status
252 $ hg status
224 A dir/b.o
253 A dir/b.o
225 ? .hgignore
254 ? .hgignore
226 ? otherignore
255 ? otherignore
227
256
228 Check recursive uses of 'include:'
257 Check recursive uses of 'include:'
229
258
230 $ echo "include:nested/ignore" >> otherignore
259 $ echo "include:nested/ignore" >> otherignore
231 $ mkdir nested
260 $ mkdir nested
232 $ echo "glob:*ignore" > nested/ignore
261 $ echo "glob:*ignore" > nested/ignore
233 $ hg status
262 $ hg status
234 A dir/b.o
263 A dir/b.o
235
264
236 $ cp otherignore goodignore
265 $ cp otherignore goodignore
237 $ echo "include:badignore" >> otherignore
266 $ echo "include:badignore" >> otherignore
238 $ hg status
267 $ hg status
239 skipping unreadable pattern file 'badignore': No such file or directory
268 skipping unreadable pattern file 'badignore': No such file or directory
240 A dir/b.o
269 A dir/b.o
241
270
242 $ mv goodignore otherignore
271 $ mv goodignore otherignore
243
272
244 Check using 'include:' while in a non-root directory
273 Check using 'include:' while in a non-root directory
245
274
246 $ cd ..
275 $ cd ..
247 $ hg -R ignorerepo status
276 $ hg -R ignorerepo status
248 A dir/b.o
277 A dir/b.o
249 $ cd ignorerepo
278 $ cd ignorerepo
250
279
251 Check including subincludes
280 Check including subincludes
252
281
253 $ hg revert -q --all
282 $ hg revert -q --all
254 $ hg purge --all --config extensions.purge=
283 $ hg purge --all --config extensions.purge=
255 $ echo ".hgignore" > .hgignore
284 $ echo ".hgignore" > .hgignore
256 $ mkdir dir1 dir2
285 $ mkdir dir1 dir2
257 $ touch dir1/file1 dir1/file2 dir2/file1 dir2/file2
286 $ touch dir1/file1 dir1/file2 dir2/file1 dir2/file2
258 $ echo "subinclude:dir2/.hgignore" >> .hgignore
287 $ echo "subinclude:dir2/.hgignore" >> .hgignore
259 $ echo "glob:file*2" > dir2/.hgignore
288 $ echo "glob:file*2" > dir2/.hgignore
260 $ hg status
289 $ hg status
261 ? dir1/file1
290 ? dir1/file1
262 ? dir1/file2
291 ? dir1/file2
263 ? dir2/file1
292 ? dir2/file1
264
293
265 Check including subincludes with regexs
294 Check including subincludes with regexs
266
295
267 $ echo "subinclude:dir1/.hgignore" >> .hgignore
296 $ echo "subinclude:dir1/.hgignore" >> .hgignore
268 $ echo "regexp:f.le1" > dir1/.hgignore
297 $ echo "regexp:f.le1" > dir1/.hgignore
269
298
270 $ hg status
299 $ hg status
271 ? dir1/file2
300 ? dir1/file2
272 ? dir2/file1
301 ? dir2/file1
273
302
274 Check multiple levels of sub-ignores
303 Check multiple levels of sub-ignores
275
304
276 $ mkdir dir1/subdir
305 $ mkdir dir1/subdir
277 $ touch dir1/subdir/subfile1 dir1/subdir/subfile3 dir1/subdir/subfile4
306 $ touch dir1/subdir/subfile1 dir1/subdir/subfile3 dir1/subdir/subfile4
278 $ echo "subinclude:subdir/.hgignore" >> dir1/.hgignore
307 $ echo "subinclude:subdir/.hgignore" >> dir1/.hgignore
279 $ echo "glob:subfil*3" >> dir1/subdir/.hgignore
308 $ echo "glob:subfil*3" >> dir1/subdir/.hgignore
280
309
281 $ hg status
310 $ hg status
282 ? dir1/file2
311 ? dir1/file2
283 ? dir1/subdir/subfile4
312 ? dir1/subdir/subfile4
284 ? dir2/file1
313 ? dir2/file1
285
314
286 Check include subignore at the same level
315 Check include subignore at the same level
287
316
288 $ mv dir1/subdir/.hgignore dir1/.hgignoretwo
317 $ mv dir1/subdir/.hgignore dir1/.hgignoretwo
289 $ echo "regexp:f.le1" > dir1/.hgignore
318 $ echo "regexp:f.le1" > dir1/.hgignore
290 $ echo "subinclude:.hgignoretwo" >> dir1/.hgignore
319 $ echo "subinclude:.hgignoretwo" >> dir1/.hgignore
291 $ echo "glob:file*2" > dir1/.hgignoretwo
320 $ echo "glob:file*2" > dir1/.hgignoretwo
292
321
293 $ hg status | grep file2
322 $ hg status | grep file2
294 [1]
323 [1]
295 $ hg debugignore dir1/file2
324 $ hg debugignore dir1/file2
296 dir1/file2 is ignored
325 dir1/file2 is ignored (glob)
297 (ignore rule in dir2/.hgignore, line 1: 'file*2')
326 (ignore rule in dir2/.hgignore, line 1: 'file*2')
298
327
299 #if windows
328 #if windows
300
329
301 Windows paths are accepted on input
330 Windows paths are accepted on input
302
331
303 $ rm dir1/.hgignore
332 $ rm dir1/.hgignore
304 $ echo "dir1/file*" >> .hgignore
333 $ echo "dir1/file*" >> .hgignore
305 $ hg debugignore "dir1\file2"
334 $ hg debugignore "dir1\file2"
306 dir1\file2 is ignored
335 dir1\file2 is ignored
307 (ignore rule in $TESTTMP\ignorerepo\.hgignore, line 4: 'dir1/file*')
336 (ignore rule in $TESTTMP\ignorerepo\.hgignore, line 4: 'dir1/file*')
308 $ hg up -qC .
337 $ hg up -qC .
309
338
310 #endif
339 #endif