nodemap: add a function to read the data from disk...
marmoute
r44790:6c07480d default
@@ -1,4323 +1,4336 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import platform
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    tags as tagsmod,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil,
    nodemap,
)

release = lockmod.release

command = registrar.command()


@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))


@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)


@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))


def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))


def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()


def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))


def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))


def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)


@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)


@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b' %s\n' % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for v in values:
                ui.write(b' %s\n' % v)


@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)


@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)


def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)


def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')


@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))


@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")


@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)


@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))


@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()


@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))


@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
916 opts = pycompat.byteskwargs(opts)
916 opts = pycompat.byteskwargs(opts)
917 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
917 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
918 remote = hg.peer(repo, opts, remoteurl)
918 remote = hg.peer(repo, opts, remoteurl)
919 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
919 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
920
920
921 # make sure tests are repeatable
921 # make sure tests are repeatable
922 random.seed(int(opts[b'seed']))
922 random.seed(int(opts[b'seed']))
923
923
924 if opts.get(b'old'):
924 if opts.get(b'old'):
925
925
926 def doit(pushedrevs, remoteheads, remote=remote):
926 def doit(pushedrevs, remoteheads, remote=remote):
927 if not util.safehasattr(remote, b'branches'):
927 if not util.safehasattr(remote, b'branches'):
928 # enable in-client legacy support
928 # enable in-client legacy support
929 remote = localrepo.locallegacypeer(remote.local())
929 remote = localrepo.locallegacypeer(remote.local())
930 common, _in, hds = treediscovery.findcommonincoming(
930 common, _in, hds = treediscovery.findcommonincoming(
931 repo, remote, force=True
931 repo, remote, force=True
932 )
932 )
933 common = set(common)
933 common = set(common)
934 if not opts.get(b'nonheads'):
934 if not opts.get(b'nonheads'):
935 ui.writenoi18n(
935 ui.writenoi18n(
936 b"unpruned common: %s\n"
936 b"unpruned common: %s\n"
937 % b" ".join(sorted(short(n) for n in common))
937 % b" ".join(sorted(short(n) for n in common))
938 )
938 )
939
939
940 clnode = repo.changelog.node
940 clnode = repo.changelog.node
941 common = repo.revs(b'heads(::%ln)', common)
941 common = repo.revs(b'heads(::%ln)', common)
942 common = {clnode(r) for r in common}
942 common = {clnode(r) for r in common}
943 return common, hds
943 return common, hds
944
944
945 else:
945 else:
946
946
947 def doit(pushedrevs, remoteheads, remote=remote):
947 def doit(pushedrevs, remoteheads, remote=remote):
948 nodes = None
948 nodes = None
949 if pushedrevs:
949 if pushedrevs:
950 revs = scmutil.revrange(repo, pushedrevs)
950 revs = scmutil.revrange(repo, pushedrevs)
951 nodes = [repo[r].node() for r in revs]
951 nodes = [repo[r].node() for r in revs]
952 common, any, hds = setdiscovery.findcommonheads(
952 common, any, hds = setdiscovery.findcommonheads(
953 ui, repo, remote, ancestorsof=nodes
953 ui, repo, remote, ancestorsof=nodes
954 )
954 )
955 return common, hds
955 return common, hds
956
956
957 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
957 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
958 localrevs = opts[b'rev']
958 localrevs = opts[b'rev']
959 with util.timedcm('debug-discovery') as t:
959 with util.timedcm('debug-discovery') as t:
960 common, hds = doit(localrevs, remoterevs)
960 common, hds = doit(localrevs, remoterevs)
961
961
962 # compute all statistics
962 # compute all statistics
963 common = set(common)
963 common = set(common)
964 rheads = set(hds)
964 rheads = set(hds)
965 lheads = set(repo.heads())
965 lheads = set(repo.heads())
966
966
967 data = {}
967 data = {}
968 data[b'elapsed'] = t.elapsed
968 data[b'elapsed'] = t.elapsed
969 data[b'nb-common'] = len(common)
969 data[b'nb-common'] = len(common)
970 data[b'nb-common-local'] = len(common & lheads)
970 data[b'nb-common-local'] = len(common & lheads)
971 data[b'nb-common-remote'] = len(common & rheads)
971 data[b'nb-common-remote'] = len(common & rheads)
972 data[b'nb-common-both'] = len(common & rheads & lheads)
972 data[b'nb-common-both'] = len(common & rheads & lheads)
973 data[b'nb-local'] = len(lheads)
973 data[b'nb-local'] = len(lheads)
974 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
974 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
975 data[b'nb-remote'] = len(rheads)
975 data[b'nb-remote'] = len(rheads)
976 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
976 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
977 data[b'nb-revs'] = len(repo.revs(b'all()'))
977 data[b'nb-revs'] = len(repo.revs(b'all()'))
978 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
978 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
979 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
979 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
980
980
981 # display discovery summary
981 # display discovery summary
982 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
982 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
983 ui.writenoi18n(b"heads summary:\n")
983 ui.writenoi18n(b"heads summary:\n")
984 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
984 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
985 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
985 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
986 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
986 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
987 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
987 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
988 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
988 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
989 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
989 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
990 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
990 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
991 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
991 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
992 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
992 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
993 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
993 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
994 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
994 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
995 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
995 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
996 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
996 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
997
997
998 if ui.verbose:
998 if ui.verbose:
999 ui.writenoi18n(
999 ui.writenoi18n(
1000 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
1000 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
1001 )
1001 )
1002
1002
1003
1003
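The head-summary counters above are plain set arithmetic over the common, local, and remote head sets. A minimal standalone sketch, using hypothetical head sets rather than real repository nodes:

# Illustrative only: recomputes the head-summary counters with toy head sets.
common = {b'a', b'b'}
local_heads = {b'a', b'c', b'd'}
remote_heads = {b'b', b'e'}

data = {
    b'nb-common': len(common),
    b'nb-common-local': len(common & local_heads),
    b'nb-common-remote': len(common & remote_heads),
    b'nb-common-both': len(common & remote_heads & local_heads),
    b'nb-local': len(local_heads),
    b'nb-remote': len(remote_heads),
}
data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
assert data[b'nb-local-missing'] == 2   # b'c' and b'd' are not common
assert data[b'nb-remote-unknown'] == 1  # b'e' is unknown locally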
1004 _chunksize = 4 << 10
1004 _chunksize = 4 << 10
1005
1005
1006
1006
1007 @command(
1007 @command(
1008 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1008 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1009 )
1009 )
1010 def debugdownload(ui, repo, url, output=None, **opts):
1010 def debugdownload(ui, repo, url, output=None, **opts):
1011 """download a resource using Mercurial logic and config
1011 """download a resource using Mercurial logic and config
1012 """
1012 """
1013 fh = urlmod.open(ui, url, output)
1013 fh = urlmod.open(ui, url, output)
1014
1014
1015 dest = ui
1015 dest = ui
1016 if output:
1016 if output:
1017 dest = open(output, b"wb", _chunksize)
1017 dest = open(output, b"wb", _chunksize)
1018 try:
1018 try:
1019 data = fh.read(_chunksize)
1019 data = fh.read(_chunksize)
1020 while data:
1020 while data:
1021 dest.write(data)
1021 dest.write(data)
1022 data = fh.read(_chunksize)
1022 data = fh.read(_chunksize)
1023 finally:
1023 finally:
1024 if output:
1024 if output:
1025 dest.close()
1025 dest.close()
1026
1026
1027
1027
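debugdownload streams the fetched resource in fixed-size chunks (4 KiB, per the _chunksize constant above). A self-contained sketch of the same copy loop over ordinary in-memory file objects; copy_in_chunks is an illustrative helper, not a Mercurial API:

import io

_chunksize = 4 << 10  # 4 KiB, matching the constant defined above

def copy_in_chunks(src, dst, chunksize=_chunksize):
    """Copy a readable binary file object to a writable one in chunks."""
    data = src.read(chunksize)
    while data:
        dst.write(data)
        data = src.read(chunksize)

src = io.BytesIO(b'x' * 10000)
dst = io.BytesIO()
copy_in_chunks(src, dst)
assert dst.getvalue() == b'x' * 10000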
1028 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1028 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1029 def debugextensions(ui, repo, **opts):
1029 def debugextensions(ui, repo, **opts):
1030 '''show information about active extensions'''
1030 '''show information about active extensions'''
1031 opts = pycompat.byteskwargs(opts)
1031 opts = pycompat.byteskwargs(opts)
1032 exts = extensions.extensions(ui)
1032 exts = extensions.extensions(ui)
1033 hgver = util.version()
1033 hgver = util.version()
1034 fm = ui.formatter(b'debugextensions', opts)
1034 fm = ui.formatter(b'debugextensions', opts)
1035 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1035 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1036 isinternal = extensions.ismoduleinternal(extmod)
1036 isinternal = extensions.ismoduleinternal(extmod)
1037 extsource = None
1037 extsource = None
1038
1038
1039 if util.safehasattr(extmod, '__file__'):
1039 if util.safehasattr(extmod, '__file__'):
1040 extsource = pycompat.fsencode(extmod.__file__)
1040 extsource = pycompat.fsencode(extmod.__file__)
1041 elif getattr(sys, 'oxidized', False):
1041 elif getattr(sys, 'oxidized', False):
1042 extsource = pycompat.sysexecutable
1042 extsource = pycompat.sysexecutable
1043 if isinternal:
1043 if isinternal:
1044 exttestedwith = [] # never expose magic string to users
1044 exttestedwith = [] # never expose magic string to users
1045 else:
1045 else:
1046 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1046 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1047 extbuglink = getattr(extmod, 'buglink', None)
1047 extbuglink = getattr(extmod, 'buglink', None)
1048
1048
1049 fm.startitem()
1049 fm.startitem()
1050
1050
1051 if ui.quiet or ui.verbose:
1051 if ui.quiet or ui.verbose:
1052 fm.write(b'name', b'%s\n', extname)
1052 fm.write(b'name', b'%s\n', extname)
1053 else:
1053 else:
1054 fm.write(b'name', b'%s', extname)
1054 fm.write(b'name', b'%s', extname)
1055 if isinternal or hgver in exttestedwith:
1055 if isinternal or hgver in exttestedwith:
1056 fm.plain(b'\n')
1056 fm.plain(b'\n')
1057 elif not exttestedwith:
1057 elif not exttestedwith:
1058 fm.plain(_(b' (untested!)\n'))
1058 fm.plain(_(b' (untested!)\n'))
1059 else:
1059 else:
1060 lasttestedversion = exttestedwith[-1]
1060 lasttestedversion = exttestedwith[-1]
1061 fm.plain(b' (%s!)\n' % lasttestedversion)
1061 fm.plain(b' (%s!)\n' % lasttestedversion)
1062
1062
1063 fm.condwrite(
1063 fm.condwrite(
1064 ui.verbose and extsource,
1064 ui.verbose and extsource,
1065 b'source',
1065 b'source',
1066 _(b' location: %s\n'),
1066 _(b' location: %s\n'),
1067 extsource or b"",
1067 extsource or b"",
1068 )
1068 )
1069
1069
1070 if ui.verbose:
1070 if ui.verbose:
1071 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1071 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1072 fm.data(bundled=isinternal)
1072 fm.data(bundled=isinternal)
1073
1073
1074 fm.condwrite(
1074 fm.condwrite(
1075 ui.verbose and exttestedwith,
1075 ui.verbose and exttestedwith,
1076 b'testedwith',
1076 b'testedwith',
1077 _(b' tested with: %s\n'),
1077 _(b' tested with: %s\n'),
1078 fm.formatlist(exttestedwith, name=b'ver'),
1078 fm.formatlist(exttestedwith, name=b'ver'),
1079 )
1079 )
1080
1080
1081 fm.condwrite(
1081 fm.condwrite(
1082 ui.verbose and extbuglink,
1082 ui.verbose and extbuglink,
1083 b'buglink',
1083 b'buglink',
1084 _(b' bug reporting: %s\n'),
1084 _(b' bug reporting: %s\n'),
1085 extbuglink or b"",
1085 extbuglink or b"",
1086 )
1086 )
1087
1087
1088 fm.end()
1088 fm.end()
1089
1089
1090
1090
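The '(untested!)' annotation above falls out of a simple check of the current Mercurial version against an extension's testedwith list. A standalone sketch of that decision with hypothetical version strings; tested_state is illustrative only:

# Illustrative helper; not a Mercurial API.
def tested_state(hgver, exttestedwith, isinternal):
    if isinternal or hgver in exttestedwith:
        return b'ok'
    if not exttestedwith:
        return b'untested'
    return b'last tested with ' + exttestedwith[-1]

assert tested_state(b'5.3', [b'5.2', b'5.3'], False) == b'ok'
assert tested_state(b'5.3', [], False) == b'untested'
assert tested_state(b'5.3', [b'5.1', b'5.2'], False) == b'last tested with 5.2'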
1091 @command(
1091 @command(
1092 b'debugfileset',
1092 b'debugfileset',
1093 [
1093 [
1094 (
1094 (
1095 b'r',
1095 b'r',
1096 b'rev',
1096 b'rev',
1097 b'',
1097 b'',
1098 _(b'apply the filespec on this revision'),
1098 _(b'apply the filespec on this revision'),
1099 _(b'REV'),
1099 _(b'REV'),
1100 ),
1100 ),
1101 (
1101 (
1102 b'',
1102 b'',
1103 b'all-files',
1103 b'all-files',
1104 False,
1104 False,
1105 _(b'test files from all revisions and working directory'),
1105 _(b'test files from all revisions and working directory'),
1106 ),
1106 ),
1107 (
1107 (
1108 b's',
1108 b's',
1109 b'show-matcher',
1109 b'show-matcher',
1110 None,
1110 None,
1111 _(b'print internal representation of matcher'),
1111 _(b'print internal representation of matcher'),
1112 ),
1112 ),
1113 (
1113 (
1114 b'p',
1114 b'p',
1115 b'show-stage',
1115 b'show-stage',
1116 [],
1116 [],
1117 _(b'print parsed tree at the given stage'),
1117 _(b'print parsed tree at the given stage'),
1118 _(b'NAME'),
1118 _(b'NAME'),
1119 ),
1119 ),
1120 ],
1120 ],
1121 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1121 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1122 )
1122 )
1123 def debugfileset(ui, repo, expr, **opts):
1123 def debugfileset(ui, repo, expr, **opts):
1124 '''parse and apply a fileset specification'''
1124 '''parse and apply a fileset specification'''
1125 from . import fileset
1125 from . import fileset
1126
1126
1127 fileset.symbols # force import of fileset so we have predicates to optimize
1127 fileset.symbols # force import of fileset so we have predicates to optimize
1128 opts = pycompat.byteskwargs(opts)
1128 opts = pycompat.byteskwargs(opts)
1129 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1129 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1130
1130
1131 stages = [
1131 stages = [
1132 (b'parsed', pycompat.identity),
1132 (b'parsed', pycompat.identity),
1133 (b'analyzed', filesetlang.analyze),
1133 (b'analyzed', filesetlang.analyze),
1134 (b'optimized', filesetlang.optimize),
1134 (b'optimized', filesetlang.optimize),
1135 ]
1135 ]
1136 stagenames = set(n for n, f in stages)
1136 stagenames = set(n for n, f in stages)
1137
1137
1138 showalways = set()
1138 showalways = set()
1139 if ui.verbose and not opts[b'show_stage']:
1139 if ui.verbose and not opts[b'show_stage']:
1140 # show parsed tree by --verbose (deprecated)
1140 # show parsed tree by --verbose (deprecated)
1141 showalways.add(b'parsed')
1141 showalways.add(b'parsed')
1142 if opts[b'show_stage'] == [b'all']:
1142 if opts[b'show_stage'] == [b'all']:
1143 showalways.update(stagenames)
1143 showalways.update(stagenames)
1144 else:
1144 else:
1145 for n in opts[b'show_stage']:
1145 for n in opts[b'show_stage']:
1146 if n not in stagenames:
1146 if n not in stagenames:
1147 raise error.Abort(_(b'invalid stage name: %s') % n)
1147 raise error.Abort(_(b'invalid stage name: %s') % n)
1148 showalways.update(opts[b'show_stage'])
1148 showalways.update(opts[b'show_stage'])
1149
1149
1150 tree = filesetlang.parse(expr)
1150 tree = filesetlang.parse(expr)
1151 for n, f in stages:
1151 for n, f in stages:
1152 tree = f(tree)
1152 tree = f(tree)
1153 if n in showalways:
1153 if n in showalways:
1154 if opts[b'show_stage'] or n != b'parsed':
1154 if opts[b'show_stage'] or n != b'parsed':
1155 ui.write(b"* %s:\n" % n)
1155 ui.write(b"* %s:\n" % n)
1156 ui.write(filesetlang.prettyformat(tree), b"\n")
1156 ui.write(filesetlang.prettyformat(tree), b"\n")
1157
1157
1158 files = set()
1158 files = set()
1159 if opts[b'all_files']:
1159 if opts[b'all_files']:
1160 for r in repo:
1160 for r in repo:
1161 c = repo[r]
1161 c = repo[r]
1162 files.update(c.files())
1162 files.update(c.files())
1163 files.update(c.substate)
1163 files.update(c.substate)
1164 if opts[b'all_files'] or ctx.rev() is None:
1164 if opts[b'all_files'] or ctx.rev() is None:
1165 wctx = repo[None]
1165 wctx = repo[None]
1166 files.update(
1166 files.update(
1167 repo.dirstate.walk(
1167 repo.dirstate.walk(
1168 scmutil.matchall(repo),
1168 scmutil.matchall(repo),
1169 subrepos=list(wctx.substate),
1169 subrepos=list(wctx.substate),
1170 unknown=True,
1170 unknown=True,
1171 ignored=True,
1171 ignored=True,
1172 )
1172 )
1173 )
1173 )
1174 files.update(wctx.substate)
1174 files.update(wctx.substate)
1175 else:
1175 else:
1176 files.update(ctx.files())
1176 files.update(ctx.files())
1177 files.update(ctx.substate)
1177 files.update(ctx.substate)
1178
1178
1179 m = ctx.matchfileset(repo.getcwd(), expr)
1179 m = ctx.matchfileset(repo.getcwd(), expr)
1180 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1180 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1181 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1181 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1182 for f in sorted(files):
1182 for f in sorted(files):
1183 if not m(f):
1183 if not m(f):
1184 continue
1184 continue
1185 ui.write(b"%s\n" % f)
1185 ui.write(b"%s\n" % f)
1186
1186
1187
1187
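debugfileset pushes the parsed expression through a list of named stages and echoes the tree after each stage selected by --show-stage. A toy sketch of the same staged-pipeline pattern, using simple string transforms instead of fileset parse trees:

# Toy stages standing in for the (parsed, analyzed, optimized) fileset stages.
stages = [
    ('parsed', lambda t: t),
    ('analyzed', str.strip),
    ('optimized', str.lower),
]
showalways = {'analyzed', 'optimized'}

tree = '  Size("> 1k")  '
for name, fn in stages:
    tree = fn(tree)
    if name in showalways:
        print('* %s: %r' % (name, tree))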
1188 @command(b'debugformat', [] + cmdutil.formatteropts)
1188 @command(b'debugformat', [] + cmdutil.formatteropts)
1189 def debugformat(ui, repo, **opts):
1189 def debugformat(ui, repo, **opts):
1190 """display format information about the current repository
1190 """display format information about the current repository
1191
1191
1192 Use --verbose to get extra information about the current config value and
1192 Use --verbose to get extra information about the current config value and
1193 the Mercurial default."""
1193 the Mercurial default."""
1194 opts = pycompat.byteskwargs(opts)
1194 opts = pycompat.byteskwargs(opts)
1195 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1195 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1196 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1196 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1197
1197
1198 def makeformatname(name):
1198 def makeformatname(name):
1199 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1199 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1200
1200
1201 fm = ui.formatter(b'debugformat', opts)
1201 fm = ui.formatter(b'debugformat', opts)
1202 if fm.isplain():
1202 if fm.isplain():
1203
1203
1204 def formatvalue(value):
1204 def formatvalue(value):
1205 if util.safehasattr(value, b'startswith'):
1205 if util.safehasattr(value, b'startswith'):
1206 return value
1206 return value
1207 if value:
1207 if value:
1208 return b'yes'
1208 return b'yes'
1209 else:
1209 else:
1210 return b'no'
1210 return b'no'
1211
1211
1212 else:
1212 else:
1213 formatvalue = pycompat.identity
1213 formatvalue = pycompat.identity
1214
1214
1215 fm.plain(b'format-variant')
1215 fm.plain(b'format-variant')
1216 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1216 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1217 fm.plain(b' repo')
1217 fm.plain(b' repo')
1218 if ui.verbose:
1218 if ui.verbose:
1219 fm.plain(b' config default')
1219 fm.plain(b' config default')
1220 fm.plain(b'\n')
1220 fm.plain(b'\n')
1221 for fv in upgrade.allformatvariant:
1221 for fv in upgrade.allformatvariant:
1222 fm.startitem()
1222 fm.startitem()
1223 repovalue = fv.fromrepo(repo)
1223 repovalue = fv.fromrepo(repo)
1224 configvalue = fv.fromconfig(repo)
1224 configvalue = fv.fromconfig(repo)
1225
1225
1226 if repovalue != configvalue:
1226 if repovalue != configvalue:
1227 namelabel = b'formatvariant.name.mismatchconfig'
1227 namelabel = b'formatvariant.name.mismatchconfig'
1228 repolabel = b'formatvariant.repo.mismatchconfig'
1228 repolabel = b'formatvariant.repo.mismatchconfig'
1229 elif repovalue != fv.default:
1229 elif repovalue != fv.default:
1230 namelabel = b'formatvariant.name.mismatchdefault'
1230 namelabel = b'formatvariant.name.mismatchdefault'
1231 repolabel = b'formatvariant.repo.mismatchdefault'
1231 repolabel = b'formatvariant.repo.mismatchdefault'
1232 else:
1232 else:
1233 namelabel = b'formatvariant.name.uptodate'
1233 namelabel = b'formatvariant.name.uptodate'
1234 repolabel = b'formatvariant.repo.uptodate'
1234 repolabel = b'formatvariant.repo.uptodate'
1235
1235
1236 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1236 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1237 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1237 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1238 if fv.default != configvalue:
1238 if fv.default != configvalue:
1239 configlabel = b'formatvariant.config.special'
1239 configlabel = b'formatvariant.config.special'
1240 else:
1240 else:
1241 configlabel = b'formatvariant.config.default'
1241 configlabel = b'formatvariant.config.default'
1242 fm.condwrite(
1242 fm.condwrite(
1243 ui.verbose,
1243 ui.verbose,
1244 b'config',
1244 b'config',
1245 b' %6s',
1245 b' %6s',
1246 formatvalue(configvalue),
1246 formatvalue(configvalue),
1247 label=configlabel,
1247 label=configlabel,
1248 )
1248 )
1249 fm.condwrite(
1249 fm.condwrite(
1250 ui.verbose,
1250 ui.verbose,
1251 b'default',
1251 b'default',
1252 b' %7s',
1252 b' %7s',
1253 formatvalue(fv.default),
1253 formatvalue(fv.default),
1254 label=b'formatvariant.default',
1254 label=b'formatvariant.default',
1255 )
1255 )
1256 fm.plain(b'\n')
1256 fm.plain(b'\n')
1257 fm.end()
1257 fm.end()
1258
1258
1259
1259
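Each format-variant row above is labelled by a three-way comparison of the repository value, the config value, and the Mercurial default. A standalone sketch of that classification with hypothetical boolean values; variant_label is illustrative only:

# Illustrative only; mirrors the repo/config/default comparison above.
def variant_label(repovalue, configvalue, default):
    if repovalue != configvalue:
        return 'mismatchconfig'
    elif repovalue != default:
        return 'mismatchdefault'
    return 'uptodate'

assert variant_label(True, False, False) == 'mismatchconfig'
assert variant_label(True, True, False) == 'mismatchdefault'
assert variant_label(False, False, False) == 'uptodate'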
1260 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1260 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1261 def debugfsinfo(ui, path=b"."):
1261 def debugfsinfo(ui, path=b"."):
1262 """show information detected about current filesystem"""
1262 """show information detected about current filesystem"""
1263 ui.writenoi18n(b'path: %s\n' % path)
1263 ui.writenoi18n(b'path: %s\n' % path)
1264 ui.writenoi18n(
1264 ui.writenoi18n(
1265 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1265 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1266 )
1266 )
1267 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1267 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1268 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1268 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1269 ui.writenoi18n(
1269 ui.writenoi18n(
1270 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1270 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1271 )
1271 )
1272 ui.writenoi18n(
1272 ui.writenoi18n(
1273 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1273 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1274 )
1274 )
1275 casesensitive = b'(unknown)'
1275 casesensitive = b'(unknown)'
1276 try:
1276 try:
1277 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1277 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1278 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1278 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1279 except OSError:
1279 except OSError:
1280 pass
1280 pass
1281 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1281 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1282
1282
1283
1283
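The case-sensitivity probe above creates a temporary file and asks util.fscasesensitive about it. A rough standalone approximation using only the standard library (swapping the case of the generated name); this is illustrative and less careful than Mercurial's own check:

import os
import tempfile

with tempfile.NamedTemporaryFile(prefix='.debugfsinfo') as f:
    directory, name = os.path.split(f.name)
    swapped = os.path.join(directory, name.swapcase())
    # On a case-insensitive filesystem the swapped-case name resolves to the
    # same file; on a case-sensitive one it (almost certainly) does not exist.
    case_sensitive = not os.path.exists(swapped)
print('case-sensitive: %s' % ('yes' if case_sensitive else 'no'))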
1284 @command(
1284 @command(
1285 b'debuggetbundle',
1285 b'debuggetbundle',
1286 [
1286 [
1287 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1287 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1288 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1288 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1289 (
1289 (
1290 b't',
1290 b't',
1291 b'type',
1291 b'type',
1292 b'bzip2',
1292 b'bzip2',
1293 _(b'bundle compression type to use'),
1293 _(b'bundle compression type to use'),
1294 _(b'TYPE'),
1294 _(b'TYPE'),
1295 ),
1295 ),
1296 ],
1296 ],
1297 _(b'REPO FILE [-H|-C ID]...'),
1297 _(b'REPO FILE [-H|-C ID]...'),
1298 norepo=True,
1298 norepo=True,
1299 )
1299 )
1300 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1300 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1301 """retrieves a bundle from a repo
1301 """retrieves a bundle from a repo
1302
1302
1303 Every ID must be a full-length hex node id string. Saves the bundle to the
1303 Every ID must be a full-length hex node id string. Saves the bundle to the
1304 given file.
1304 given file.
1305 """
1305 """
1306 opts = pycompat.byteskwargs(opts)
1306 opts = pycompat.byteskwargs(opts)
1307 repo = hg.peer(ui, opts, repopath)
1307 repo = hg.peer(ui, opts, repopath)
1308 if not repo.capable(b'getbundle'):
1308 if not repo.capable(b'getbundle'):
1309 raise error.Abort(b"getbundle() not supported by target repository")
1309 raise error.Abort(b"getbundle() not supported by target repository")
1310 args = {}
1310 args = {}
1311 if common:
1311 if common:
1312 args['common'] = [bin(s) for s in common]
1312 args['common'] = [bin(s) for s in common]
1313 if head:
1313 if head:
1314 args['heads'] = [bin(s) for s in head]
1314 args['heads'] = [bin(s) for s in head]
1315 # TODO: get desired bundlecaps from command line.
1315 # TODO: get desired bundlecaps from command line.
1316 args['bundlecaps'] = None
1316 args['bundlecaps'] = None
1317 bundle = repo.getbundle(b'debug', **args)
1317 bundle = repo.getbundle(b'debug', **args)
1318
1318
1319 bundletype = opts.get(b'type', b'bzip2').lower()
1319 bundletype = opts.get(b'type', b'bzip2').lower()
1320 btypes = {
1320 btypes = {
1321 b'none': b'HG10UN',
1321 b'none': b'HG10UN',
1322 b'bzip2': b'HG10BZ',
1322 b'bzip2': b'HG10BZ',
1323 b'gzip': b'HG10GZ',
1323 b'gzip': b'HG10GZ',
1324 b'bundle2': b'HG20',
1324 b'bundle2': b'HG20',
1325 }
1325 }
1326 bundletype = btypes.get(bundletype)
1326 bundletype = btypes.get(bundletype)
1327 if bundletype not in bundle2.bundletypes:
1327 if bundletype not in bundle2.bundletypes:
1328 raise error.Abort(_(b'unknown bundle type specified with --type'))
1328 raise error.Abort(_(b'unknown bundle type specified with --type'))
1329 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1329 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1330
1330
1331
1331
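The --type option above is lowercased, mapped through btypes, and rejected if the result is not a known bundle type. A standalone sketch of that normalization; known_bundletypes stands in for bundle2.bundletypes:

btypes = {
    b'none': b'HG10UN',
    b'bzip2': b'HG10BZ',
    b'gzip': b'HG10GZ',
    b'bundle2': b'HG20',
}
known_bundletypes = set(btypes.values())  # stand-in for bundle2.bundletypes

def resolve_bundletype(requested):
    bundletype = btypes.get(requested.lower())
    if bundletype not in known_bundletypes:
        raise ValueError('unknown bundle type specified with --type')
    return bundletype

assert resolve_bundletype(b'GZIP') == b'HG10GZ'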
1332 @command(b'debugignore', [], b'[FILE]')
1332 @command(b'debugignore', [], b'[FILE]')
1333 def debugignore(ui, repo, *files, **opts):
1333 def debugignore(ui, repo, *files, **opts):
1334 """display the combined ignore pattern and information about ignored files
1334 """display the combined ignore pattern and information about ignored files
1335
1335
1336 With no argument display the combined ignore pattern.
1336 With no argument display the combined ignore pattern.
1337
1337
1338 Given space-separated file names, shows if the given file is ignored and
1338 Given space-separated file names, shows if the given file is ignored and
1339 if so, shows the ignore rule (file and line number) that matched it.
1339 if so, shows the ignore rule (file and line number) that matched it.
1340 """
1340 """
1341 ignore = repo.dirstate._ignore
1341 ignore = repo.dirstate._ignore
1342 if not files:
1342 if not files:
1343 # Show all the patterns
1343 # Show all the patterns
1344 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1344 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1345 else:
1345 else:
1346 m = scmutil.match(repo[None], pats=files)
1346 m = scmutil.match(repo[None], pats=files)
1347 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1347 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1348 for f in m.files():
1348 for f in m.files():
1349 nf = util.normpath(f)
1349 nf = util.normpath(f)
1350 ignored = None
1350 ignored = None
1351 ignoredata = None
1351 ignoredata = None
1352 if nf != b'.':
1352 if nf != b'.':
1353 if ignore(nf):
1353 if ignore(nf):
1354 ignored = nf
1354 ignored = nf
1355 ignoredata = repo.dirstate._ignorefileandline(nf)
1355 ignoredata = repo.dirstate._ignorefileandline(nf)
1356 else:
1356 else:
1357 for p in pathutil.finddirs(nf):
1357 for p in pathutil.finddirs(nf):
1358 if ignore(p):
1358 if ignore(p):
1359 ignored = p
1359 ignored = p
1360 ignoredata = repo.dirstate._ignorefileandline(p)
1360 ignoredata = repo.dirstate._ignorefileandline(p)
1361 break
1361 break
1362 if ignored:
1362 if ignored:
1363 if ignored == nf:
1363 if ignored == nf:
1364 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1364 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1365 else:
1365 else:
1366 ui.write(
1366 ui.write(
1367 _(
1367 _(
1368 b"%s is ignored because of "
1368 b"%s is ignored because of "
1369 b"containing directory %s\n"
1369 b"containing directory %s\n"
1370 )
1370 )
1371 % (uipathfn(f), ignored)
1371 % (uipathfn(f), ignored)
1372 )
1372 )
1373 ignorefile, lineno, line = ignoredata
1373 ignorefile, lineno, line = ignoredata
1374 ui.write(
1374 ui.write(
1375 _(b"(ignore rule in %s, line %d: '%s')\n")
1375 _(b"(ignore rule in %s, line %d: '%s')\n")
1376 % (ignorefile, lineno, line)
1376 % (ignorefile, lineno, line)
1377 )
1377 )
1378 else:
1378 else:
1379 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1379 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1380
1380
1381
1381
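For each named file, debugignore tests the file itself first and then each containing directory until an ignore rule matches. A standalone sketch of that lookup order with a toy ignore predicate; finddirs here is a simplified stand-in for pathutil.finddirs:

def finddirs(path):
    # simplified stand-in for pathutil.finddirs: 'a/b/c' -> 'a/b', 'a'
    while '/' in path:
        path = path.rsplit('/', 1)[0]
        yield path

ignore = lambda p: p == 'build'  # toy predicate: one ignored directory

def ignore_reason(nf):
    if ignore(nf):
        return nf
    for p in finddirs(nf):
        if ignore(p):
            return p
    return None

assert ignore_reason('build/output.o') == 'build'
assert ignore_reason('src/main.c') is None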
1382 @command(
1382 @command(
1383 b'debugindex',
1383 b'debugindex',
1384 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1384 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1385 _(b'-c|-m|FILE'),
1385 _(b'-c|-m|FILE'),
1386 )
1386 )
1387 def debugindex(ui, repo, file_=None, **opts):
1387 def debugindex(ui, repo, file_=None, **opts):
1388 """dump index data for a storage primitive"""
1388 """dump index data for a storage primitive"""
1389 opts = pycompat.byteskwargs(opts)
1389 opts = pycompat.byteskwargs(opts)
1390 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1390 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1391
1391
1392 if ui.debugflag:
1392 if ui.debugflag:
1393 shortfn = hex
1393 shortfn = hex
1394 else:
1394 else:
1395 shortfn = short
1395 shortfn = short
1396
1396
1397 idlen = 12
1397 idlen = 12
1398 for i in store:
1398 for i in store:
1399 idlen = len(shortfn(store.node(i)))
1399 idlen = len(shortfn(store.node(i)))
1400 break
1400 break
1401
1401
1402 fm = ui.formatter(b'debugindex', opts)
1402 fm = ui.formatter(b'debugindex', opts)
1403 fm.plain(
1403 fm.plain(
1404 b' rev linkrev %s %s p2\n'
1404 b' rev linkrev %s %s p2\n'
1405 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1405 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1406 )
1406 )
1407
1407
1408 for rev in store:
1408 for rev in store:
1409 node = store.node(rev)
1409 node = store.node(rev)
1410 parents = store.parents(node)
1410 parents = store.parents(node)
1411
1411
1412 fm.startitem()
1412 fm.startitem()
1413 fm.write(b'rev', b'%6d ', rev)
1413 fm.write(b'rev', b'%6d ', rev)
1414 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1414 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1415 fm.write(b'node', b'%s ', shortfn(node))
1415 fm.write(b'node', b'%s ', shortfn(node))
1416 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1416 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1417 fm.write(b'p2', b'%s', shortfn(parents[1]))
1417 fm.write(b'p2', b'%s', shortfn(parents[1]))
1418 fm.plain(b'\n')
1418 fm.plain(b'\n')
1419
1419
1420 fm.end()
1420 fm.end()
1421
1421
1422
1422
1423 @command(
1423 @command(
1424 b'debugindexdot',
1424 b'debugindexdot',
1425 cmdutil.debugrevlogopts,
1425 cmdutil.debugrevlogopts,
1426 _(b'-c|-m|FILE'),
1426 _(b'-c|-m|FILE'),
1427 optionalrepo=True,
1427 optionalrepo=True,
1428 )
1428 )
1429 def debugindexdot(ui, repo, file_=None, **opts):
1429 def debugindexdot(ui, repo, file_=None, **opts):
1430 """dump an index DAG as a graphviz dot file"""
1430 """dump an index DAG as a graphviz dot file"""
1431 opts = pycompat.byteskwargs(opts)
1431 opts = pycompat.byteskwargs(opts)
1432 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1432 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1433 ui.writenoi18n(b"digraph G {\n")
1433 ui.writenoi18n(b"digraph G {\n")
1434 for i in r:
1434 for i in r:
1435 node = r.node(i)
1435 node = r.node(i)
1436 pp = r.parents(node)
1436 pp = r.parents(node)
1437 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1437 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1438 if pp[1] != nullid:
1438 if pp[1] != nullid:
1439 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1439 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1440 ui.write(b"}\n")
1440 ui.write(b"}\n")
1441
1441
1442
1442
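debugindexdot emits one graphviz edge per parent pointer, always writing the first parent (so root revisions show an edge from -1) and skipping a null second parent. A standalone sketch over a toy DAG described as a revision-to-parents mapping:

nullrev = -1
# toy DAG: revision -> (p1, p2); revision 2 is a merge of 0 and 1
parents = {0: (nullrev, nullrev), 1: (0, nullrev), 2: (0, 1)}

lines = ['digraph G {']
for rev in sorted(parents):
    p1, p2 = parents[rev]
    lines.append('\t%d -> %d' % (p1, rev))  # first parent always written
    if p2 != nullrev:
        lines.append('\t%d -> %d' % (p2, rev))
lines.append('}')
print('\n'.join(lines))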
1443 @command(b'debugindexstats', [])
1443 @command(b'debugindexstats', [])
1444 def debugindexstats(ui, repo):
1444 def debugindexstats(ui, repo):
1445 """show stats related to the changelog index"""
1445 """show stats related to the changelog index"""
1446 repo.changelog.shortest(nullid, 1)
1446 repo.changelog.shortest(nullid, 1)
1447 index = repo.changelog.index
1447 index = repo.changelog.index
1448 if not util.safehasattr(index, b'stats'):
1448 if not util.safehasattr(index, b'stats'):
1449 raise error.Abort(_(b'debugindexstats only works with native code'))
1449 raise error.Abort(_(b'debugindexstats only works with native code'))
1450 for k, v in sorted(index.stats().items()):
1450 for k, v in sorted(index.stats().items()):
1451 ui.write(b'%s: %d\n' % (k, v))
1451 ui.write(b'%s: %d\n' % (k, v))
1452
1452
1453
1453
1454 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1454 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1455 def debuginstall(ui, **opts):
1455 def debuginstall(ui, **opts):
1456 '''test Mercurial installation
1456 '''test Mercurial installation
1457
1457
1458 Returns 0 on success.
1458 Returns 0 on success.
1459 '''
1459 '''
1460 opts = pycompat.byteskwargs(opts)
1460 opts = pycompat.byteskwargs(opts)
1461
1461
1462 problems = 0
1462 problems = 0
1463
1463
1464 fm = ui.formatter(b'debuginstall', opts)
1464 fm = ui.formatter(b'debuginstall', opts)
1465 fm.startitem()
1465 fm.startitem()
1466
1466
1467 # encoding
1467 # encoding
1468 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1468 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1469 err = None
1469 err = None
1470 try:
1470 try:
1471 codecs.lookup(pycompat.sysstr(encoding.encoding))
1471 codecs.lookup(pycompat.sysstr(encoding.encoding))
1472 except LookupError as inst:
1472 except LookupError as inst:
1473 err = stringutil.forcebytestr(inst)
1473 err = stringutil.forcebytestr(inst)
1474 problems += 1
1474 problems += 1
1475 fm.condwrite(
1475 fm.condwrite(
1476 err,
1476 err,
1477 b'encodingerror',
1477 b'encodingerror',
1478 _(b" %s\n (check that your locale is properly set)\n"),
1478 _(b" %s\n (check that your locale is properly set)\n"),
1479 err,
1479 err,
1480 )
1480 )
1481
1481
1482 # Python
1482 # Python
1483 pythonlib = None
1483 pythonlib = None
1484 if util.safehasattr(os, '__file__'):
1484 if util.safehasattr(os, '__file__'):
1485 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1485 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1486 elif getattr(sys, 'oxidized', False):
1486 elif getattr(sys, 'oxidized', False):
1487 pythonlib = pycompat.sysexecutable
1487 pythonlib = pycompat.sysexecutable
1488
1488
1489 fm.write(
1489 fm.write(
1490 b'pythonexe',
1490 b'pythonexe',
1491 _(b"checking Python executable (%s)\n"),
1491 _(b"checking Python executable (%s)\n"),
1492 pycompat.sysexecutable or _(b"unknown"),
1492 pycompat.sysexecutable or _(b"unknown"),
1493 )
1493 )
1494 fm.write(
1494 fm.write(
1495 b'pythonimplementation',
1495 b'pythonimplementation',
1496 _(b"checking Python implementation (%s)\n"),
1496 _(b"checking Python implementation (%s)\n"),
1497 pycompat.sysbytes(platform.python_implementation()),
1497 pycompat.sysbytes(platform.python_implementation()),
1498 )
1498 )
1499 fm.write(
1499 fm.write(
1500 b'pythonver',
1500 b'pythonver',
1501 _(b"checking Python version (%s)\n"),
1501 _(b"checking Python version (%s)\n"),
1502 (b"%d.%d.%d" % sys.version_info[:3]),
1502 (b"%d.%d.%d" % sys.version_info[:3]),
1503 )
1503 )
1504 fm.write(
1504 fm.write(
1505 b'pythonlib',
1505 b'pythonlib',
1506 _(b"checking Python lib (%s)...\n"),
1506 _(b"checking Python lib (%s)...\n"),
1507 pythonlib or _(b"unknown"),
1507 pythonlib or _(b"unknown"),
1508 )
1508 )
1509
1509
1510 security = set(sslutil.supportedprotocols)
1510 security = set(sslutil.supportedprotocols)
1511 if sslutil.hassni:
1511 if sslutil.hassni:
1512 security.add(b'sni')
1512 security.add(b'sni')
1513
1513
1514 fm.write(
1514 fm.write(
1515 b'pythonsecurity',
1515 b'pythonsecurity',
1516 _(b"checking Python security support (%s)\n"),
1516 _(b"checking Python security support (%s)\n"),
1517 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1517 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1518 )
1518 )
1519
1519
1520 # These are warnings, not errors. So don't increment problem count. This
1520 # These are warnings, not errors. So don't increment problem count. This
1521 # may change in the future.
1521 # may change in the future.
1522 if b'tls1.2' not in security:
1522 if b'tls1.2' not in security:
1523 fm.plain(
1523 fm.plain(
1524 _(
1524 _(
1525 b' TLS 1.2 not supported by Python install; '
1525 b' TLS 1.2 not supported by Python install; '
1526 b'network connections lack modern security\n'
1526 b'network connections lack modern security\n'
1527 )
1527 )
1528 )
1528 )
1529 if b'sni' not in security:
1529 if b'sni' not in security:
1530 fm.plain(
1530 fm.plain(
1531 _(
1531 _(
1532 b' SNI not supported by Python install; may have '
1532 b' SNI not supported by Python install; may have '
1533 b'connectivity issues with some servers\n'
1533 b'connectivity issues with some servers\n'
1534 )
1534 )
1535 )
1535 )
1536
1536
1537 # TODO print CA cert info
1537 # TODO print CA cert info
1538
1538
1539 # hg version
1539 # hg version
1540 hgver = util.version()
1540 hgver = util.version()
1541 fm.write(
1541 fm.write(
1542 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1542 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1543 )
1543 )
1544 fm.write(
1544 fm.write(
1545 b'hgverextra',
1545 b'hgverextra',
1546 _(b"checking Mercurial custom build (%s)\n"),
1546 _(b"checking Mercurial custom build (%s)\n"),
1547 b'+'.join(hgver.split(b'+')[1:]),
1547 b'+'.join(hgver.split(b'+')[1:]),
1548 )
1548 )
1549
1549
1550 # compiled modules
1550 # compiled modules
1551 hgmodules = None
1551 hgmodules = None
1552 if util.safehasattr(sys.modules[__name__], '__file__'):
1552 if util.safehasattr(sys.modules[__name__], '__file__'):
1553 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1553 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1554 elif getattr(sys, 'oxidized', False):
1554 elif getattr(sys, 'oxidized', False):
1555 hgmodules = pycompat.sysexecutable
1555 hgmodules = pycompat.sysexecutable
1556
1556
1557 fm.write(
1557 fm.write(
1558 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1558 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1559 )
1559 )
1560 fm.write(
1560 fm.write(
1561 b'hgmodules',
1561 b'hgmodules',
1562 _(b"checking installed modules (%s)...\n"),
1562 _(b"checking installed modules (%s)...\n"),
1563 hgmodules or _(b"unknown"),
1563 hgmodules or _(b"unknown"),
1564 )
1564 )
1565
1565
1566 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1566 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1567 rustext = rustandc # for now, that's the only case
1567 rustext = rustandc # for now, that's the only case
1568 cext = policy.policy in (b'c', b'allow') or rustandc
1568 cext = policy.policy in (b'c', b'allow') or rustandc
1569 nopure = cext or rustext
1569 nopure = cext or rustext
1570 if nopure:
1570 if nopure:
1571 err = None
1571 err = None
1572 try:
1572 try:
1573 if cext:
1573 if cext:
1574 from .cext import ( # pytype: disable=import-error
1574 from .cext import ( # pytype: disable=import-error
1575 base85,
1575 base85,
1576 bdiff,
1576 bdiff,
1577 mpatch,
1577 mpatch,
1578 osutil,
1578 osutil,
1579 )
1579 )
1580
1580
1581 # quiet pyflakes
1581 # quiet pyflakes
1582 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1582 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1583 if rustext:
1583 if rustext:
1584 from .rustext import ( # pytype: disable=import-error
1584 from .rustext import ( # pytype: disable=import-error
1585 ancestor,
1585 ancestor,
1586 dirstate,
1586 dirstate,
1587 )
1587 )
1588
1588
1589 dir(ancestor), dir(dirstate) # quiet pyflakes
1589 dir(ancestor), dir(dirstate) # quiet pyflakes
1590 except Exception as inst:
1590 except Exception as inst:
1591 err = stringutil.forcebytestr(inst)
1591 err = stringutil.forcebytestr(inst)
1592 problems += 1
1592 problems += 1
1593 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1593 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1594
1594
1595 compengines = util.compengines._engines.values()
1595 compengines = util.compengines._engines.values()
1596 fm.write(
1596 fm.write(
1597 b'compengines',
1597 b'compengines',
1598 _(b'checking registered compression engines (%s)\n'),
1598 _(b'checking registered compression engines (%s)\n'),
1599 fm.formatlist(
1599 fm.formatlist(
1600 sorted(e.name() for e in compengines),
1600 sorted(e.name() for e in compengines),
1601 name=b'compengine',
1601 name=b'compengine',
1602 fmt=b'%s',
1602 fmt=b'%s',
1603 sep=b', ',
1603 sep=b', ',
1604 ),
1604 ),
1605 )
1605 )
1606 fm.write(
1606 fm.write(
1607 b'compenginesavail',
1607 b'compenginesavail',
1608 _(b'checking available compression engines (%s)\n'),
1608 _(b'checking available compression engines (%s)\n'),
1609 fm.formatlist(
1609 fm.formatlist(
1610 sorted(e.name() for e in compengines if e.available()),
1610 sorted(e.name() for e in compengines if e.available()),
1611 name=b'compengine',
1611 name=b'compengine',
1612 fmt=b'%s',
1612 fmt=b'%s',
1613 sep=b', ',
1613 sep=b', ',
1614 ),
1614 ),
1615 )
1615 )
1616 wirecompengines = compression.compengines.supportedwireengines(
1616 wirecompengines = compression.compengines.supportedwireengines(
1617 compression.SERVERROLE
1617 compression.SERVERROLE
1618 )
1618 )
1619 fm.write(
1619 fm.write(
1620 b'compenginesserver',
1620 b'compenginesserver',
1621 _(
1621 _(
1622 b'checking available compression engines '
1622 b'checking available compression engines '
1623 b'for wire protocol (%s)\n'
1623 b'for wire protocol (%s)\n'
1624 ),
1624 ),
1625 fm.formatlist(
1625 fm.formatlist(
1626 [e.name() for e in wirecompengines if e.wireprotosupport()],
1626 [e.name() for e in wirecompengines if e.wireprotosupport()],
1627 name=b'compengine',
1627 name=b'compengine',
1628 fmt=b'%s',
1628 fmt=b'%s',
1629 sep=b', ',
1629 sep=b', ',
1630 ),
1630 ),
1631 )
1631 )
1632 re2 = b'missing'
1632 re2 = b'missing'
1633 if util._re2:
1633 if util._re2:
1634 re2 = b'available'
1634 re2 = b'available'
1635 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1635 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1636 fm.data(re2=bool(util._re2))
1636 fm.data(re2=bool(util._re2))
1637
1637
1638 # templates
1638 # templates
1639 p = templater.templatepaths()
1639 p = templater.templatepaths()
1640 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1640 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1641 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1641 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1642 if p:
1642 if p:
1643 m = templater.templatepath(b"map-cmdline.default")
1643 m = templater.templatepath(b"map-cmdline.default")
1644 if m:
1644 if m:
1645 # template found, check if it is working
1645 # template found, check if it is working
1646 err = None
1646 err = None
1647 try:
1647 try:
1648 templater.templater.frommapfile(m)
1648 templater.templater.frommapfile(m)
1649 except Exception as inst:
1649 except Exception as inst:
1650 err = stringutil.forcebytestr(inst)
1650 err = stringutil.forcebytestr(inst)
1651 p = None
1651 p = None
1652 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1652 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1653 else:
1653 else:
1654 p = None
1654 p = None
1655 fm.condwrite(
1655 fm.condwrite(
1656 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1656 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1657 )
1657 )
1658 fm.condwrite(
1658 fm.condwrite(
1659 not m,
1659 not m,
1660 b'defaulttemplatenotfound',
1660 b'defaulttemplatenotfound',
1661 _(b" template '%s' not found\n"),
1661 _(b" template '%s' not found\n"),
1662 b"default",
1662 b"default",
1663 )
1663 )
1664 if not p:
1664 if not p:
1665 problems += 1
1665 problems += 1
1666 fm.condwrite(
1666 fm.condwrite(
1667 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1667 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1668 )
1668 )
1669
1669
1670 # editor
1670 # editor
1671 editor = ui.geteditor()
1671 editor = ui.geteditor()
1672 editor = util.expandpath(editor)
1672 editor = util.expandpath(editor)
1673 editorbin = procutil.shellsplit(editor)[0]
1673 editorbin = procutil.shellsplit(editor)[0]
1674 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1674 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1675 cmdpath = procutil.findexe(editorbin)
1675 cmdpath = procutil.findexe(editorbin)
1676 fm.condwrite(
1676 fm.condwrite(
1677 not cmdpath and editor == b'vi',
1677 not cmdpath and editor == b'vi',
1678 b'vinotfound',
1678 b'vinotfound',
1679 _(
1679 _(
1680 b" No commit editor set and can't find %s in PATH\n"
1680 b" No commit editor set and can't find %s in PATH\n"
1681 b" (specify a commit editor in your configuration"
1681 b" (specify a commit editor in your configuration"
1682 b" file)\n"
1682 b" file)\n"
1683 ),
1683 ),
1684 not cmdpath and editor == b'vi' and editorbin,
1684 not cmdpath and editor == b'vi' and editorbin,
1685 )
1685 )
1686 fm.condwrite(
1686 fm.condwrite(
1687 not cmdpath and editor != b'vi',
1687 not cmdpath and editor != b'vi',
1688 b'editornotfound',
1688 b'editornotfound',
1689 _(
1689 _(
1690 b" Can't find editor '%s' in PATH\n"
1690 b" Can't find editor '%s' in PATH\n"
1691 b" (specify a commit editor in your configuration"
1691 b" (specify a commit editor in your configuration"
1692 b" file)\n"
1692 b" file)\n"
1693 ),
1693 ),
1694 not cmdpath and editorbin,
1694 not cmdpath and editorbin,
1695 )
1695 )
1696 if not cmdpath and editor != b'vi':
1696 if not cmdpath and editor != b'vi':
1697 problems += 1
1697 problems += 1
1698
1698
1699 # check username
1699 # check username
1700 username = None
1700 username = None
1701 err = None
1701 err = None
1702 try:
1702 try:
1703 username = ui.username()
1703 username = ui.username()
1704 except error.Abort as e:
1704 except error.Abort as e:
1705 err = stringutil.forcebytestr(e)
1705 err = stringutil.forcebytestr(e)
1706 problems += 1
1706 problems += 1
1707
1707
1708 fm.condwrite(
1708 fm.condwrite(
1709 username, b'username', _(b"checking username (%s)\n"), username
1709 username, b'username', _(b"checking username (%s)\n"), username
1710 )
1710 )
1711 fm.condwrite(
1711 fm.condwrite(
1712 err,
1712 err,
1713 b'usernameerror',
1713 b'usernameerror',
1714 _(
1714 _(
1715 b"checking username...\n %s\n"
1715 b"checking username...\n %s\n"
1716 b" (specify a username in your configuration file)\n"
1716 b" (specify a username in your configuration file)\n"
1717 ),
1717 ),
1718 err,
1718 err,
1719 )
1719 )
1720
1720
1721 for name, mod in extensions.extensions():
1721 for name, mod in extensions.extensions():
1722 handler = getattr(mod, 'debuginstall', None)
1722 handler = getattr(mod, 'debuginstall', None)
1723 if handler is not None:
1723 if handler is not None:
1724 problems += handler(ui, fm)
1724 problems += handler(ui, fm)
1725
1725
1726 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1726 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1727 if not problems:
1727 if not problems:
1728 fm.data(problems=problems)
1728 fm.data(problems=problems)
1729 fm.condwrite(
1729 fm.condwrite(
1730 problems,
1730 problems,
1731 b'problems',
1731 b'problems',
1732 _(b"%d problems detected, please check your install!\n"),
1732 _(b"%d problems detected, please check your install!\n"),
1733 problems,
1733 problems,
1734 )
1734 )
1735 fm.end()
1735 fm.end()
1736
1736
1737 return problems
1737 return problems
1738
1738
1739
1739
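The encoding check near the top of debuginstall relies on codecs.lookup() raising LookupError for an unknown codec, with each failed probe bumping the problems counter. A minimal standalone sketch of that pattern:

import codecs

problems = 0
for name in ('utf-8', 'no-such-encoding'):
    try:
        codecs.lookup(name)
    except LookupError:
        problems += 1
assert problems == 1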
1740 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1740 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1741 def debugknown(ui, repopath, *ids, **opts):
1741 def debugknown(ui, repopath, *ids, **opts):
1742 """test whether node ids are known to a repo
1742 """test whether node ids are known to a repo
1743
1743
1744 Every ID must be a full-length hex node id string. Returns a list of 0s
1744 Every ID must be a full-length hex node id string. Returns a list of 0s
1745 and 1s indicating unknown/known.
1745 and 1s indicating unknown/known.
1746 """
1746 """
1747 opts = pycompat.byteskwargs(opts)
1747 opts = pycompat.byteskwargs(opts)
1748 repo = hg.peer(ui, opts, repopath)
1748 repo = hg.peer(ui, opts, repopath)
1749 if not repo.capable(b'known'):
1749 if not repo.capable(b'known'):
1750 raise error.Abort(b"known() not supported by target repository")
1750 raise error.Abort(b"known() not supported by target repository")
1751 flags = repo.known([bin(s) for s in ids])
1751 flags = repo.known([bin(s) for s in ids])
1752 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1752 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1753
1753
1754
1754
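debugknown prints one '0' or '1' per queried node, in order. A tiny sketch of that formatting with hypothetical flags:

flags = [True, False, True]  # hypothetical known() answers
line = b''.join([f and b'1' or b'0' for f in flags])
assert line == b'101'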
1755 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1755 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1756 def debuglabelcomplete(ui, repo, *args):
1756 def debuglabelcomplete(ui, repo, *args):
1757 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1757 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1758 debugnamecomplete(ui, repo, *args)
1758 debugnamecomplete(ui, repo, *args)
1759
1759
1760
1760
1761 @command(
1761 @command(
1762 b'debuglocks',
1762 b'debuglocks',
1763 [
1763 [
1764 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1764 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1765 (
1765 (
1766 b'W',
1766 b'W',
1767 b'force-wlock',
1767 b'force-wlock',
1768 None,
1768 None,
1769 _(b'free the working state lock (DANGEROUS)'),
1769 _(b'free the working state lock (DANGEROUS)'),
1770 ),
1770 ),
1771 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1771 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1772 (
1772 (
1773 b'S',
1773 b'S',
1774 b'set-wlock',
1774 b'set-wlock',
1775 None,
1775 None,
1776 _(b'set the working state lock until stopped'),
1776 _(b'set the working state lock until stopped'),
1777 ),
1777 ),
1778 ],
1778 ],
1779 _(b'[OPTION]...'),
1779 _(b'[OPTION]...'),
1780 )
1780 )
1781 def debuglocks(ui, repo, **opts):
1781 def debuglocks(ui, repo, **opts):
1782 """show or modify state of locks
1782 """show or modify state of locks
1783
1783
1784 By default, this command will show which locks are held. This
1784 By default, this command will show which locks are held. This
1785 includes the user and process holding the lock, the amount of time
1785 includes the user and process holding the lock, the amount of time
1786 the lock has been held, and the machine name where the process is
1786 the lock has been held, and the machine name where the process is
1787 running if it's not local.
1787 running if it's not local.
1788
1788
1789 Locks protect the integrity of Mercurial's data, so they should be
1789 Locks protect the integrity of Mercurial's data, so they should be
1790 treated with care. System crashes or other interruptions may cause
1790 treated with care. System crashes or other interruptions may cause
1791 locks to not be properly released, though Mercurial will usually
1791 locks to not be properly released, though Mercurial will usually
1792 detect and remove such stale locks automatically.
1792 detect and remove such stale locks automatically.
1793
1793
1794 However, detecting stale locks may not always be possible (for
1794 However, detecting stale locks may not always be possible (for
1795 instance, on a shared filesystem). Removing locks may also be
1795 instance, on a shared filesystem). Removing locks may also be
1796 blocked by filesystem permissions.
1796 blocked by filesystem permissions.
1797
1797
1798 Setting a lock will prevent other commands from changing the data.
1798 Setting a lock will prevent other commands from changing the data.
1799 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1799 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1800 The set locks are removed when the command exits.
1800 The set locks are removed when the command exits.
1801
1801
1802 Returns 0 if no locks are held.
1802 Returns 0 if no locks are held.
1803
1803
1804 """
1804 """
1805
1805
1806 if opts.get('force_lock'):
1806 if opts.get('force_lock'):
1807 repo.svfs.unlink(b'lock')
1807 repo.svfs.unlink(b'lock')
1808 if opts.get('force_wlock'):
1808 if opts.get('force_wlock'):
1809 repo.vfs.unlink(b'wlock')
1809 repo.vfs.unlink(b'wlock')
1810 if opts.get('force_lock') or opts.get('force_wlock'):
1810 if opts.get('force_lock') or opts.get('force_wlock'):
1811 return 0
1811 return 0
1812
1812
1813 locks = []
1813 locks = []
1814 try:
1814 try:
1815 if opts.get('set_wlock'):
1815 if opts.get('set_wlock'):
1816 try:
1816 try:
1817 locks.append(repo.wlock(False))
1817 locks.append(repo.wlock(False))
1818 except error.LockHeld:
1818 except error.LockHeld:
1819 raise error.Abort(_(b'wlock is already held'))
1819 raise error.Abort(_(b'wlock is already held'))
1820 if opts.get('set_lock'):
1820 if opts.get('set_lock'):
1821 try:
1821 try:
1822 locks.append(repo.lock(False))
1822 locks.append(repo.lock(False))
1823 except error.LockHeld:
1823 except error.LockHeld:
1824 raise error.Abort(_(b'lock is already held'))
1824 raise error.Abort(_(b'lock is already held'))
1825 if len(locks):
1825 if len(locks):
1826 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1826 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1827 return 0
1827 return 0
1828 finally:
1828 finally:
1829 release(*locks)
1829 release(*locks)
1830
1830
1831 now = time.time()
1831 now = time.time()
1832 held = 0
1832 held = 0
1833
1833
1834 def report(vfs, name, method):
1834 def report(vfs, name, method):
1835 # this causes stale locks to get reaped for more accurate reporting
1835 # this causes stale locks to get reaped for more accurate reporting
1836 try:
1836 try:
1837 l = method(False)
1837 l = method(False)
1838 except error.LockHeld:
1838 except error.LockHeld:
1839 l = None
1839 l = None
1840
1840
1841 if l:
1841 if l:
1842 l.release()
1842 l.release()
1843 else:
1843 else:
1844 try:
1844 try:
1845 st = vfs.lstat(name)
1845 st = vfs.lstat(name)
1846 age = now - st[stat.ST_MTIME]
1846 age = now - st[stat.ST_MTIME]
1847 user = util.username(st.st_uid)
1847 user = util.username(st.st_uid)
1848 locker = vfs.readlock(name)
1848 locker = vfs.readlock(name)
1849 if b":" in locker:
1849 if b":" in locker:
1850 host, pid = locker.split(b':')
1850 host, pid = locker.split(b':')
1851 if host == socket.gethostname():
1851 if host == socket.gethostname():
1852 locker = b'user %s, process %s' % (user or b'None', pid)
1852 locker = b'user %s, process %s' % (user or b'None', pid)
1853 else:
1853 else:
1854 locker = b'user %s, process %s, host %s' % (
1854 locker = b'user %s, process %s, host %s' % (
1855 user or b'None',
1855 user or b'None',
1856 pid,
1856 pid,
1857 host,
1857 host,
1858 )
1858 )
1859 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1859 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1860 return 1
1860 return 1
1861 except OSError as e:
1861 except OSError as e:
1862 if e.errno != errno.ENOENT:
1862 if e.errno != errno.ENOENT:
1863 raise
1863 raise
1864
1864
1865 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1865 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1866 return 0
1866 return 0
1867
1867
1868 held += report(repo.svfs, b"lock", repo.lock)
1868 held += report(repo.svfs, b"lock", repo.lock)
1869 held += report(repo.vfs, b"wlock", repo.wlock)
1869 held += report(repo.vfs, b"wlock", repo.wlock)
1870
1870
1871 return held
1871 return held
1872
1872
1873
1873
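Lock files store a locker string of the form host:pid; the report above expands it into a human-readable description, dropping the host part when it matches the local hostname. A standalone sketch with a hypothetical locker value; describe_locker is illustrative only:

import socket

def describe_locker(locker, user=b'alice'):
    if b':' not in locker:
        return locker
    host, pid = locker.split(b':')
    if host == socket.gethostname().encode('ascii', 'replace'):
        return b'user %s, process %s' % (user, pid)
    return b'user %s, process %s, host %s' % (user, pid, host)

line = describe_locker(b'buildbox:4242')
assert line.startswith(b'user alice, process 4242')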
1874 @command(
1874 @command(
1875 b'debugmanifestfulltextcache',
1875 b'debugmanifestfulltextcache',
1876 [
1876 [
1877 (b'', b'clear', False, _(b'clear the cache')),
1877 (b'', b'clear', False, _(b'clear the cache')),
1878 (
1878 (
1879 b'a',
1879 b'a',
1880 b'add',
1880 b'add',
1881 [],
1881 [],
1882 _(b'add the given manifest nodes to the cache'),
1882 _(b'add the given manifest nodes to the cache'),
1883 _(b'NODE'),
1883 _(b'NODE'),
1884 ),
1884 ),
1885 ],
1885 ],
1886 b'',
1886 b'',
1887 )
1887 )
1888 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1888 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1889 """show, clear or amend the contents of the manifest fulltext cache"""
1889 """show, clear or amend the contents of the manifest fulltext cache"""
1890
1890
1891 def getcache():
1891 def getcache():
1892 r = repo.manifestlog.getstorage(b'')
1892 r = repo.manifestlog.getstorage(b'')
1893 try:
1893 try:
1894 return r._fulltextcache
1894 return r._fulltextcache
1895 except AttributeError:
1895 except AttributeError:
1896 msg = _(
1896 msg = _(
1897 b"Current revlog implementation doesn't appear to have a "
1897 b"Current revlog implementation doesn't appear to have a "
1898 b"manifest fulltext cache\n"
1898 b"manifest fulltext cache\n"
1899 )
1899 )
1900 raise error.Abort(msg)
1900 raise error.Abort(msg)
1901
1901
1902 if opts.get('clear'):
1902 if opts.get('clear'):
1903 with repo.wlock():
1903 with repo.wlock():
1904 cache = getcache()
1904 cache = getcache()
1905 cache.clear(clear_persisted_data=True)
1905 cache.clear(clear_persisted_data=True)
1906 return
1906 return
1907
1907
1908 if add:
1908 if add:
1909 with repo.wlock():
1909 with repo.wlock():
1910 m = repo.manifestlog
1910 m = repo.manifestlog
1911 store = m.getstorage(b'')
1911 store = m.getstorage(b'')
1912 for n in add:
1912 for n in add:
1913 try:
1913 try:
1914 manifest = m[store.lookup(n)]
1914 manifest = m[store.lookup(n)]
1915 except error.LookupError as e:
1915 except error.LookupError as e:
1916 raise error.Abort(e, hint=b"Check your manifest node id")
1916 raise error.Abort(e, hint=b"Check your manifest node id")
1917 manifest.read() # stores revision in cache too
1917 manifest.read() # stores revision in cache too
1918 return
1918 return
1919
1919
1920 cache = getcache()
1920 cache = getcache()
1921 if not len(cache):
1921 if not len(cache):
1922 ui.write(_(b'cache empty\n'))
1922 ui.write(_(b'cache empty\n'))
1923 else:
1923 else:
1924 ui.write(
1924 ui.write(
1925 _(
1925 _(
1926 b'cache contains %d manifest entries, in order of most to '
1926 b'cache contains %d manifest entries, in order of most to '
1927 b'least recent:\n'
1927 b'least recent:\n'
1928 )
1928 )
1929 % (len(cache),)
1929 % (len(cache),)
1930 )
1930 )
1931 totalsize = 0
1931 totalsize = 0
1932 for nodeid in cache:
1932 for nodeid in cache:
1933 # Use cache.peek to not update the LRU order
1933 # Use cache.peek to not update the LRU order
1934 data = cache.peek(nodeid)
1934 data = cache.peek(nodeid)
1935 size = len(data)
1935 size = len(data)
1936 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1936 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1937 ui.write(
1937 ui.write(
1938 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1938 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1939 )
1939 )
1940 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1940 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1941 ui.write(
1941 ui.write(
1942 _(b'total cache data size %s, on-disk %s\n')
1942 _(b'total cache data size %s, on-disk %s\n')
1943 % (util.bytecount(totalsize), util.bytecount(ondisk))
1943 % (util.bytecount(totalsize), util.bytecount(ondisk))
1944 )
1944 )
1945
1945
1946
1946
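The cache-size summary above charges each entry its data length plus a fixed 24-byte overhead (20-byte nodeid plus 4-byte size field). A quick standalone check of that arithmetic with hypothetical entries:

entries = {b'n' * 20: b'manifest-text-1', b'm' * 20: b'manifest-text-22'}
totalsize = sum(len(data) + 24 for data in entries.values())
assert totalsize == len(b'manifest-text-1') + len(b'manifest-text-22') + 2 * 24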
1947 @command(b'debugmergestate', [], b'')
1947 @command(b'debugmergestate', [], b'')
1948 def debugmergestate(ui, repo, *args):
1948 def debugmergestate(ui, repo, *args):
1949 """print merge state
1949 """print merge state
1950
1950
1951 Use --verbose to print out information about whether v1 or v2 merge state
1951 Use --verbose to print out information about whether v1 or v2 merge state
1952 was chosen."""
1952 was chosen."""
1953
1953
1954 def _hashornull(h):
1954 def _hashornull(h):
1955 if h == nullhex:
1955 if h == nullhex:
1956 return b'null'
1956 return b'null'
1957 else:
1957 else:
1958 return h
1958 return h
1959
1959
1960 def printrecords(version):
1960 def printrecords(version):
1961 ui.writenoi18n(b'* version %d records\n' % version)
1961 ui.writenoi18n(b'* version %d records\n' % version)
1962 if version == 1:
1962 if version == 1:
1963 records = v1records
1963 records = v1records
1964 else:
1964 else:
1965 records = v2records
1965 records = v2records
1966
1966
1967 for rtype, record in records:
1967 for rtype, record in records:
1968 # pretty print some record types
1968 # pretty print some record types
1969 if rtype == b'L':
1969 if rtype == b'L':
1970 ui.writenoi18n(b'local: %s\n' % record)
1970 ui.writenoi18n(b'local: %s\n' % record)
1971 elif rtype == b'O':
1971 elif rtype == b'O':
1972 ui.writenoi18n(b'other: %s\n' % record)
1972 ui.writenoi18n(b'other: %s\n' % record)
1973 elif rtype == b'm':
1973 elif rtype == b'm':
1974 driver, mdstate = record.split(b'\0', 1)
1974 driver, mdstate = record.split(b'\0', 1)
1975 ui.writenoi18n(
1975 ui.writenoi18n(
1976 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1976 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1977 )
1977 )
1978 elif rtype in b'FDC':
1978 elif rtype in b'FDC':
1979 r = record.split(b'\0')
1979 r = record.split(b'\0')
1980 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1980 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1981 if version == 1:
1981 if version == 1:
1982 onode = b'not stored in v1 format'
1982 onode = b'not stored in v1 format'
1983 flags = r[7]
1983 flags = r[7]
1984 else:
1984 else:
1985 onode, flags = r[7:9]
1985 onode, flags = r[7:9]
1986 ui.writenoi18n(
1986 ui.writenoi18n(
1987 b'file: %s (record type "%s", state "%s", hash %s)\n'
1987 b'file: %s (record type "%s", state "%s", hash %s)\n'
1988 % (f, rtype, state, _hashornull(hash))
1988 % (f, rtype, state, _hashornull(hash))
1989 )
1989 )
1990 ui.writenoi18n(
1990 ui.writenoi18n(
1991 b' local path: %s (flags "%s")\n' % (lfile, flags)
1991 b' local path: %s (flags "%s")\n' % (lfile, flags)
1992 )
1992 )
1993 ui.writenoi18n(
1993 ui.writenoi18n(
1994 b' ancestor path: %s (node %s)\n'
1994 b' ancestor path: %s (node %s)\n'
1995 % (afile, _hashornull(anode))
1995 % (afile, _hashornull(anode))
1996 )
1996 )
1997 ui.writenoi18n(
1997 ui.writenoi18n(
1998 b' other path: %s (node %s)\n'
1998 b' other path: %s (node %s)\n'
1999 % (ofile, _hashornull(onode))
1999 % (ofile, _hashornull(onode))
2000 )
2000 )
2001 elif rtype == b'f':
2001 elif rtype == b'f':
2002 filename, rawextras = record.split(b'\0', 1)
2002 filename, rawextras = record.split(b'\0', 1)
2003 extras = rawextras.split(b'\0')
2003 extras = rawextras.split(b'\0')
2004 i = 0
2004 i = 0
2005 extrastrings = []
2005 extrastrings = []
2006 while i < len(extras):
2006 while i < len(extras):
2007 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
2007 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
2008 i += 2
2008 i += 2
2009
2009
2010 ui.writenoi18n(
2010 ui.writenoi18n(
2011 b'file extras: %s (%s)\n'
2011 b'file extras: %s (%s)\n'
2012 % (filename, b', '.join(extrastrings))
2012 % (filename, b', '.join(extrastrings))
2013 )
2013 )
2014 elif rtype == b'l':
2014 elif rtype == b'l':
2015 labels = record.split(b'\0', 2)
2015 labels = record.split(b'\0', 2)
2016 labels = [l for l in labels if len(l) > 0]
2016 labels = [l for l in labels if len(l) > 0]
2017 ui.writenoi18n(b'labels:\n')
2017 ui.writenoi18n(b'labels:\n')
2018 ui.write((b' local: %s\n' % labels[0]))
2018 ui.write((b' local: %s\n' % labels[0]))
2019 ui.write((b' other: %s\n' % labels[1]))
2019 ui.write((b' other: %s\n' % labels[1]))
2020 if len(labels) > 2:
2020 if len(labels) > 2:
2021 ui.write((b' base: %s\n' % labels[2]))
2021 ui.write((b' base: %s\n' % labels[2]))
2022 else:
2022 else:
2023 ui.writenoi18n(
2023 ui.writenoi18n(
2024 b'unrecognized entry: %s\t%s\n'
2024 b'unrecognized entry: %s\t%s\n'
2025 % (rtype, record.replace(b'\0', b'\t'))
2025 % (rtype, record.replace(b'\0', b'\t'))
2026 )
2026 )
2027
2027
2028 # Avoid mergestate.read() since it may raise an exception for unsupported
2028 # Avoid mergestate.read() since it may raise an exception for unsupported
2029 # merge state records. We shouldn't be doing this, but this is OK since this
2029 # merge state records. We shouldn't be doing this, but this is OK since this
2030 # command is pretty low-level.
2030 # command is pretty low-level.
2031 ms = mergemod.mergestate(repo)
2031 ms = mergemod.mergestate(repo)
2032
2032
2033 # sort so that reasonable information is on top
2033 # sort so that reasonable information is on top
2034 v1records = ms._readrecordsv1()
2034 v1records = ms._readrecordsv1()
2035 v2records = ms._readrecordsv2()
2035 v2records = ms._readrecordsv2()
2036 order = b'LOml'
2036 order = b'LOml'
2037
2037
2038 def key(r):
2038 def key(r):
2039 idx = order.find(r[0])
2039 idx = order.find(r[0])
2040 if idx == -1:
2040 if idx == -1:
2041 return (1, r[1])
2041 return (1, r[1])
2042 else:
2042 else:
2043 return (0, idx)
2043 return (0, idx)
2044
2044
2045 v1records.sort(key=key)
2045 v1records.sort(key=key)
2046 v2records.sort(key=key)
2046 v2records.sort(key=key)
2047
2047
2048 if not v1records and not v2records:
2048 if not v1records and not v2records:
2049 ui.writenoi18n(b'no merge state found\n')
2049 ui.writenoi18n(b'no merge state found\n')
2050 elif not v2records:
2050 elif not v2records:
2051 ui.notenoi18n(b'no version 2 merge state\n')
2051 ui.notenoi18n(b'no version 2 merge state\n')
2052 printrecords(1)
2052 printrecords(1)
2053 elif ms._v1v2match(v1records, v2records):
2053 elif ms._v1v2match(v1records, v2records):
2054 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2054 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2055 printrecords(2)
2055 printrecords(2)
2056 else:
2056 else:
2057 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2057 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2058 printrecords(1)
2058 printrecords(1)
2059 if ui.verbose:
2059 if ui.verbose:
2060 printrecords(2)
2060 printrecords(2)
2061
2061
2062
2062
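The sort above simply puts the well-known record types first, in the fixed order 'L', 'O', 'm', 'l', and everything else afterwards keyed by its payload. A standalone sketch of that ordering, with plain (rtype, payload) byte tuples standing in for real merge-state records::

    # Illustrative only: mirrors the b'LOml'-first ordering used above.
    ORDER = b'LOml'

    def sort_key(rec):
        rtype, payload = rec
        idx = ORDER.find(rtype)
        # unknown record types sort after the known ones, ordered by payload
        return (1, payload) if idx == -1 else (0, idx)

    records = [(b'f', b'extras'), (b'O', b'other-node'), (b'L', b'local-node')]
    print(sorted(records, key=sort_key))
    # [(b'L', b'local-node'), (b'O', b'other-node'), (b'f', b'extras')]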
2063 @command(b'debugnamecomplete', [], _(b'NAME...'))
2063 @command(b'debugnamecomplete', [], _(b'NAME...'))
2064 def debugnamecomplete(ui, repo, *args):
2064 def debugnamecomplete(ui, repo, *args):
2065 '''complete "names" - tags, open branch names, bookmark names'''
2065 '''complete "names" - tags, open branch names, bookmark names'''
2066
2066
2067 names = set()
2067 names = set()
2068 # since we previously only listed open branches, we will handle that
2068 # since we previously only listed open branches, we will handle that
2069 # specially (after this for loop)
2069 # specially (after this for loop)
2070 for name, ns in pycompat.iteritems(repo.names):
2070 for name, ns in pycompat.iteritems(repo.names):
2071 if name != b'branches':
2071 if name != b'branches':
2072 names.update(ns.listnames(repo))
2072 names.update(ns.listnames(repo))
2073 names.update(
2073 names.update(
2074 tag
2074 tag
2075 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2075 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2076 if not closed
2076 if not closed
2077 )
2077 )
2078 completions = set()
2078 completions = set()
2079 if not args:
2079 if not args:
2080 args = [b'']
2080 args = [b'']
2081 for a in args:
2081 for a in args:
2082 completions.update(n for n in names if n.startswith(a))
2082 completions.update(n for n in names if n.startswith(a))
2083 ui.write(b'\n'.join(sorted(completions)))
2083 ui.write(b'\n'.join(sorted(completions)))
2084 ui.write(b'\n')
2084 ui.write(b'\n')
2085
2085
2086
2086
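debugnamecomplete above boils down to a prefix filter over the union of tag, bookmark and open-branch names. A minimal sketch of the same matching, assuming a hard-coded name set in place of repo.names and the branchmap::

    names = {b'default', b'stable', b'tip', b'feature-x', b'v1.0'}

    def complete(args, names):
        if not args:
            args = [b'']              # no argument: complete everything
        out = set()
        for prefix in args:
            out.update(n for n in names if n.startswith(prefix))
        return b'\n'.join(sorted(out))

    print(complete([b'fea', b'v'], names).decode())
    # feature-x
    # v1.0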
2087 @command(
2087 @command(
2088 b'debugnodemap',
2088 b'debugnodemap',
2089 [(b'', b'dump', False, _(b'write persistent binary nodemap on stdout'))],
2089 [
2090 (
2091 b'',
2092 b'dump-new',
2093 False,
2094 _(b'write a (new) persistent binary nodemap on stdout'),
2095 ),
2096 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2097 ],
2090 )
2098 )
2091 def debugnodemap(ui, repo, **opts):
2099 def debugnodemap(ui, repo, **opts):
2092 """write and inspect on disk nodemap
2100 """write and inspect on disk nodemap
2093 """
2101 """
2094 if opts['dump']:
2102 if opts['dump_new']:
2095 unfi = repo.unfiltered()
2103 unfi = repo.unfiltered()
2096 cl = unfi.changelog
2104 cl = unfi.changelog
2097 data = nodemap.persistent_data(cl.index)
2105 data = nodemap.persistent_data(cl.index)
2098 ui.write(data)
2106 ui.write(data)
2107 elif opts['dump_disk']:
2108 unfi = repo.unfiltered()
2109 cl = unfi.changelog
2110 data = nodemap.persisted_data(cl)
2111 ui.write(data)
2099
2112
2100
2113
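The new --dump-disk branch is the read side added by this change: nodemap.persisted_data(cl) hands back whatever bytes are currently stored for the changelog's nodemap, and the command writes them out untouched. As a rough mental model only (a hypothetical layout, not the real format produced by the nodemap module), a node-to-rev table can be round-tripped as fixed-width 24-byte records::

    import struct

    def dump(mapping):
        # mapping: {20-byte node: rev}; one 24-byte record per entry
        out = bytearray()
        for node, rev in sorted(mapping.items()):
            out += node + struct.pack('>I', rev)
        return bytes(out)

    def load(data):
        mapping = {}
        for off in range(0, len(data), 24):
            node = data[off:off + 20]
            (rev,) = struct.unpack('>I', data[off + 20:off + 24])
            mapping[node] = rev
        return mapping

    sample = {b'\x11' * 20: 0, b'\x22' * 20: 1}
    assert load(dump(sample)) == sample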
2101 @command(
2114 @command(
2102 b'debugobsolete',
2115 b'debugobsolete',
2103 [
2116 [
2104 (b'', b'flags', 0, _(b'markers flag')),
2117 (b'', b'flags', 0, _(b'markers flag')),
2105 (
2118 (
2106 b'',
2119 b'',
2107 b'record-parents',
2120 b'record-parents',
2108 False,
2121 False,
2109 _(b'record parent information for the precursor'),
2122 _(b'record parent information for the precursor'),
2110 ),
2123 ),
2111 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2124 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2112 (
2125 (
2113 b'',
2126 b'',
2114 b'exclusive',
2127 b'exclusive',
2115 False,
2128 False,
2116 _(b'restrict display to markers only relevant to REV'),
2129 _(b'restrict display to markers only relevant to REV'),
2117 ),
2130 ),
2118 (b'', b'index', False, _(b'display index of the marker')),
2131 (b'', b'index', False, _(b'display index of the marker')),
2119 (b'', b'delete', [], _(b'delete markers specified by indices')),
2132 (b'', b'delete', [], _(b'delete markers specified by indices')),
2120 ]
2133 ]
2121 + cmdutil.commitopts2
2134 + cmdutil.commitopts2
2122 + cmdutil.formatteropts,
2135 + cmdutil.formatteropts,
2123 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2136 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2124 )
2137 )
2125 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2138 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2126 """create arbitrary obsolete marker
2139 """create arbitrary obsolete marker
2127
2140
2128 With no arguments, displays the list of obsolescence markers."""
2141 With no arguments, displays the list of obsolescence markers."""
2129
2142
2130 opts = pycompat.byteskwargs(opts)
2143 opts = pycompat.byteskwargs(opts)
2131
2144
2132 def parsenodeid(s):
2145 def parsenodeid(s):
2133 try:
2146 try:
2134 # We do not use revsingle/revrange functions here to accept
2147 # We do not use revsingle/revrange functions here to accept
2135 # arbitrary node identifiers, possibly not present in the
2148 # arbitrary node identifiers, possibly not present in the
2136 # local repository.
2149 # local repository.
2137 n = bin(s)
2150 n = bin(s)
2138 if len(n) != len(nullid):
2151 if len(n) != len(nullid):
2139 raise TypeError()
2152 raise TypeError()
2140 return n
2153 return n
2141 except TypeError:
2154 except TypeError:
2142 raise error.Abort(
2155 raise error.Abort(
2143 b'changeset references must be full hexadecimal '
2156 b'changeset references must be full hexadecimal '
2144 b'node identifiers'
2157 b'node identifiers'
2145 )
2158 )
2146
2159
2147 if opts.get(b'delete'):
2160 if opts.get(b'delete'):
2148 indices = []
2161 indices = []
2149 for v in opts.get(b'delete'):
2162 for v in opts.get(b'delete'):
2150 try:
2163 try:
2151 indices.append(int(v))
2164 indices.append(int(v))
2152 except ValueError:
2165 except ValueError:
2153 raise error.Abort(
2166 raise error.Abort(
2154 _(b'invalid index value: %r') % v,
2167 _(b'invalid index value: %r') % v,
2155 hint=_(b'use integers for indices'),
2168 hint=_(b'use integers for indices'),
2156 )
2169 )
2157
2170
2158 if repo.currenttransaction():
2171 if repo.currenttransaction():
2159 raise error.Abort(
2172 raise error.Abort(
2160 _(b'cannot delete obsmarkers in the middle of a transaction.')
2173 _(b'cannot delete obsmarkers in the middle of a transaction.')
2161 )
2174 )
2162
2175
2163 with repo.lock():
2176 with repo.lock():
2164 n = repair.deleteobsmarkers(repo.obsstore, indices)
2177 n = repair.deleteobsmarkers(repo.obsstore, indices)
2165 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2178 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2166
2179
2167 return
2180 return
2168
2181
2169 if precursor is not None:
2182 if precursor is not None:
2170 if opts[b'rev']:
2183 if opts[b'rev']:
2171 raise error.Abort(b'cannot select revision when creating marker')
2184 raise error.Abort(b'cannot select revision when creating marker')
2172 metadata = {}
2185 metadata = {}
2173 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2186 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2174 succs = tuple(parsenodeid(succ) for succ in successors)
2187 succs = tuple(parsenodeid(succ) for succ in successors)
2175 l = repo.lock()
2188 l = repo.lock()
2176 try:
2189 try:
2177 tr = repo.transaction(b'debugobsolete')
2190 tr = repo.transaction(b'debugobsolete')
2178 try:
2191 try:
2179 date = opts.get(b'date')
2192 date = opts.get(b'date')
2180 if date:
2193 if date:
2181 date = dateutil.parsedate(date)
2194 date = dateutil.parsedate(date)
2182 else:
2195 else:
2183 date = None
2196 date = None
2184 prec = parsenodeid(precursor)
2197 prec = parsenodeid(precursor)
2185 parents = None
2198 parents = None
2186 if opts[b'record_parents']:
2199 if opts[b'record_parents']:
2187 if prec not in repo.unfiltered():
2200 if prec not in repo.unfiltered():
2188 raise error.Abort(
2201 raise error.Abort(
2189 b'cannot use --record-parents on '
2202 b'cannot use --record-parents on '
2190 b'unknown changesets'
2203 b'unknown changesets'
2191 )
2204 )
2192 parents = repo.unfiltered()[prec].parents()
2205 parents = repo.unfiltered()[prec].parents()
2193 parents = tuple(p.node() for p in parents)
2206 parents = tuple(p.node() for p in parents)
2194 repo.obsstore.create(
2207 repo.obsstore.create(
2195 tr,
2208 tr,
2196 prec,
2209 prec,
2197 succs,
2210 succs,
2198 opts[b'flags'],
2211 opts[b'flags'],
2199 parents=parents,
2212 parents=parents,
2200 date=date,
2213 date=date,
2201 metadata=metadata,
2214 metadata=metadata,
2202 ui=ui,
2215 ui=ui,
2203 )
2216 )
2204 tr.close()
2217 tr.close()
2205 except ValueError as exc:
2218 except ValueError as exc:
2206 raise error.Abort(
2219 raise error.Abort(
2207 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2220 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2208 )
2221 )
2209 finally:
2222 finally:
2210 tr.release()
2223 tr.release()
2211 finally:
2224 finally:
2212 l.release()
2225 l.release()
2213 else:
2226 else:
2214 if opts[b'rev']:
2227 if opts[b'rev']:
2215 revs = scmutil.revrange(repo, opts[b'rev'])
2228 revs = scmutil.revrange(repo, opts[b'rev'])
2216 nodes = [repo[r].node() for r in revs]
2229 nodes = [repo[r].node() for r in revs]
2217 markers = list(
2230 markers = list(
2218 obsutil.getmarkers(
2231 obsutil.getmarkers(
2219 repo, nodes=nodes, exclusive=opts[b'exclusive']
2232 repo, nodes=nodes, exclusive=opts[b'exclusive']
2220 )
2233 )
2221 )
2234 )
2222 markers.sort(key=lambda x: x._data)
2235 markers.sort(key=lambda x: x._data)
2223 else:
2236 else:
2224 markers = obsutil.getmarkers(repo)
2237 markers = obsutil.getmarkers(repo)
2225
2238
2226 markerstoiter = markers
2239 markerstoiter = markers
2227 isrelevant = lambda m: True
2240 isrelevant = lambda m: True
2228 if opts.get(b'rev') and opts.get(b'index'):
2241 if opts.get(b'rev') and opts.get(b'index'):
2229 markerstoiter = obsutil.getmarkers(repo)
2242 markerstoiter = obsutil.getmarkers(repo)
2230 markerset = set(markers)
2243 markerset = set(markers)
2231 isrelevant = lambda m: m in markerset
2244 isrelevant = lambda m: m in markerset
2232
2245
2233 fm = ui.formatter(b'debugobsolete', opts)
2246 fm = ui.formatter(b'debugobsolete', opts)
2234 for i, m in enumerate(markerstoiter):
2247 for i, m in enumerate(markerstoiter):
2235 if not isrelevant(m):
2248 if not isrelevant(m):
2236 # marker can be irrelevant when we're iterating over a set
2249 # marker can be irrelevant when we're iterating over a set
2237 # of markers (markerstoiter) which is bigger than the set
2250 # of markers (markerstoiter) which is bigger than the set
2238 # of markers we want to display (markers)
2251 # of markers we want to display (markers)
2239 # this can happen if both --index and --rev options are
2252 # this can happen if both --index and --rev options are
2240 # provided and thus we need to iterate over all of the markers
2253 # provided and thus we need to iterate over all of the markers
2241 # to get the correct indices, but only display the ones that
2254 # to get the correct indices, but only display the ones that
2242 # are relevant to --rev value
2255 # are relevant to --rev value
2243 continue
2256 continue
2244 fm.startitem()
2257 fm.startitem()
2245 ind = i if opts.get(b'index') else None
2258 ind = i if opts.get(b'index') else None
2246 cmdutil.showmarker(fm, m, index=ind)
2259 cmdutil.showmarker(fm, m, index=ind)
2247 fm.end()
2260 fm.end()
2248
2261
2249
2262
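parsenodeid above deliberately bypasses revsingle/revrange and only accepts full binary node identifiers. A standalone sketch of the same strictness, assuming 20-byte (40 hex digit) SHA-1 nodes::

    import binascii

    NODE_LEN = 20  # length of a full sha-1 node

    def parse_nodeid(s):
        try:
            n = binascii.unhexlify(s)
        except (binascii.Error, ValueError):
            raise ValueError('not hexadecimal: %r' % s)
        if len(n) != NODE_LEN:
            raise ValueError('need a full %d-digit hex node' % (2 * NODE_LEN))
        return n

    parse_nodeid(b'11' * 20)      # ok: full 40-digit identifier
    # parse_nodeid(b'1234abcd')   # raises: abbreviated nodes are rejected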
2250 @command(
2263 @command(
2251 b'debugp1copies',
2264 b'debugp1copies',
2252 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2265 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2253 _(b'[-r REV]'),
2266 _(b'[-r REV]'),
2254 )
2267 )
2255 def debugp1copies(ui, repo, **opts):
2268 def debugp1copies(ui, repo, **opts):
2256 """dump copy information compared to p1"""
2269 """dump copy information compared to p1"""
2257
2270
2258 opts = pycompat.byteskwargs(opts)
2271 opts = pycompat.byteskwargs(opts)
2259 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2272 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2260 for dst, src in ctx.p1copies().items():
2273 for dst, src in ctx.p1copies().items():
2261 ui.write(b'%s -> %s\n' % (src, dst))
2274 ui.write(b'%s -> %s\n' % (src, dst))
2262
2275
2263
2276
2264 @command(
2277 @command(
2265 b'debugp2copies',
2278 b'debugp2copies',
2266 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2279 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2267 _(b'[-r REV]'),
2280 _(b'[-r REV]'),
2268 )
2281 )
2269 def debugp2copies(ui, repo, **opts):
2282 def debugp2copies(ui, repo, **opts):
2270 """dump copy information compared to p2"""
2283 """dump copy information compared to p2"""
2271
2284
2272 opts = pycompat.byteskwargs(opts)
2285 opts = pycompat.byteskwargs(opts)
2273 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2286 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2274 for dst, src in ctx.p2copies().items():
2287 for dst, src in ctx.p2copies().items():
2275 ui.write(b'%s -> %s\n' % (src, dst))
2288 ui.write(b'%s -> %s\n' % (src, dst))
2276
2289
2277
2290
2278 @command(
2291 @command(
2279 b'debugpathcomplete',
2292 b'debugpathcomplete',
2280 [
2293 [
2281 (b'f', b'full', None, _(b'complete an entire path')),
2294 (b'f', b'full', None, _(b'complete an entire path')),
2282 (b'n', b'normal', None, _(b'show only normal files')),
2295 (b'n', b'normal', None, _(b'show only normal files')),
2283 (b'a', b'added', None, _(b'show only added files')),
2296 (b'a', b'added', None, _(b'show only added files')),
2284 (b'r', b'removed', None, _(b'show only removed files')),
2297 (b'r', b'removed', None, _(b'show only removed files')),
2285 ],
2298 ],
2286 _(b'FILESPEC...'),
2299 _(b'FILESPEC...'),
2287 )
2300 )
2288 def debugpathcomplete(ui, repo, *specs, **opts):
2301 def debugpathcomplete(ui, repo, *specs, **opts):
2289 '''complete part or all of a tracked path
2302 '''complete part or all of a tracked path
2290
2303
2291 This command supports shells that offer path name completion. It
2304 This command supports shells that offer path name completion. It
2292 currently completes only files already known to the dirstate.
2305 currently completes only files already known to the dirstate.
2293
2306
2294 Completion extends only to the next path segment unless
2307 Completion extends only to the next path segment unless
2295 --full is specified, in which case entire paths are used.'''
2308 --full is specified, in which case entire paths are used.'''
2296
2309
2297 def complete(path, acceptable):
2310 def complete(path, acceptable):
2298 dirstate = repo.dirstate
2311 dirstate = repo.dirstate
2299 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2312 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2300 rootdir = repo.root + pycompat.ossep
2313 rootdir = repo.root + pycompat.ossep
2301 if spec != repo.root and not spec.startswith(rootdir):
2314 if spec != repo.root and not spec.startswith(rootdir):
2302 return [], []
2315 return [], []
2303 if os.path.isdir(spec):
2316 if os.path.isdir(spec):
2304 spec += b'/'
2317 spec += b'/'
2305 spec = spec[len(rootdir) :]
2318 spec = spec[len(rootdir) :]
2306 fixpaths = pycompat.ossep != b'/'
2319 fixpaths = pycompat.ossep != b'/'
2307 if fixpaths:
2320 if fixpaths:
2308 spec = spec.replace(pycompat.ossep, b'/')
2321 spec = spec.replace(pycompat.ossep, b'/')
2309 speclen = len(spec)
2322 speclen = len(spec)
2310 fullpaths = opts['full']
2323 fullpaths = opts['full']
2311 files, dirs = set(), set()
2324 files, dirs = set(), set()
2312 adddir, addfile = dirs.add, files.add
2325 adddir, addfile = dirs.add, files.add
2313 for f, st in pycompat.iteritems(dirstate):
2326 for f, st in pycompat.iteritems(dirstate):
2314 if f.startswith(spec) and st[0] in acceptable:
2327 if f.startswith(spec) and st[0] in acceptable:
2315 if fixpaths:
2328 if fixpaths:
2316 f = f.replace(b'/', pycompat.ossep)
2329 f = f.replace(b'/', pycompat.ossep)
2317 if fullpaths:
2330 if fullpaths:
2318 addfile(f)
2331 addfile(f)
2319 continue
2332 continue
2320 s = f.find(pycompat.ossep, speclen)
2333 s = f.find(pycompat.ossep, speclen)
2321 if s >= 0:
2334 if s >= 0:
2322 adddir(f[:s])
2335 adddir(f[:s])
2323 else:
2336 else:
2324 addfile(f)
2337 addfile(f)
2325 return files, dirs
2338 return files, dirs
2326
2339
2327 acceptable = b''
2340 acceptable = b''
2328 if opts['normal']:
2341 if opts['normal']:
2329 acceptable += b'nm'
2342 acceptable += b'nm'
2330 if opts['added']:
2343 if opts['added']:
2331 acceptable += b'a'
2344 acceptable += b'a'
2332 if opts['removed']:
2345 if opts['removed']:
2333 acceptable += b'r'
2346 acceptable += b'r'
2334 cwd = repo.getcwd()
2347 cwd = repo.getcwd()
2335 if not specs:
2348 if not specs:
2336 specs = [b'.']
2349 specs = [b'.']
2337
2350
2338 files, dirs = set(), set()
2351 files, dirs = set(), set()
2339 for spec in specs:
2352 for spec in specs:
2340 f, d = complete(spec, acceptable or b'nmar')
2353 f, d = complete(spec, acceptable or b'nmar')
2341 files.update(f)
2354 files.update(f)
2342 dirs.update(d)
2355 dirs.update(d)
2343 files.update(dirs)
2356 files.update(dirs)
2344 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2357 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2345 ui.write(b'\n')
2358 ui.write(b'\n')
2346
2359
2347
2360
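The completion loop above stops at the next path separator unless --full is given: a matching file either contributes its whole path or just its next directory component. The same behaviour in isolation, with an in-memory list standing in for the dirstate::

    tracked = [b'src/main.py', b'src/util/io.py', b'src/util/net.py', b'README']

    def complete(spec, full=False):
        files, dirs = set(), set()
        for f in tracked:
            if not f.startswith(spec):
                continue
            if full:
                files.add(f)
                continue
            s = f.find(b'/', len(spec))
            if s >= 0:
                dirs.add(f[:s])   # stop at the next path segment
            else:
                files.add(f)
        return sorted(files | dirs)

    print(complete(b'src/'))             # [b'src/main.py', b'src/util']
    print(complete(b'src/', full=True))  # every tracked path under src/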
2348 @command(
2361 @command(
2349 b'debugpathcopies',
2362 b'debugpathcopies',
2350 cmdutil.walkopts,
2363 cmdutil.walkopts,
2351 b'hg debugpathcopies REV1 REV2 [FILE]',
2364 b'hg debugpathcopies REV1 REV2 [FILE]',
2352 inferrepo=True,
2365 inferrepo=True,
2353 )
2366 )
2354 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2367 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2355 """show copies between two revisions"""
2368 """show copies between two revisions"""
2356 ctx1 = scmutil.revsingle(repo, rev1)
2369 ctx1 = scmutil.revsingle(repo, rev1)
2357 ctx2 = scmutil.revsingle(repo, rev2)
2370 ctx2 = scmutil.revsingle(repo, rev2)
2358 m = scmutil.match(ctx1, pats, opts)
2371 m = scmutil.match(ctx1, pats, opts)
2359 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2372 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2360 ui.write(b'%s -> %s\n' % (src, dst))
2373 ui.write(b'%s -> %s\n' % (src, dst))
2361
2374
2362
2375
2363 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2376 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2364 def debugpeer(ui, path):
2377 def debugpeer(ui, path):
2365 """establish a connection to a peer repository"""
2378 """establish a connection to a peer repository"""
2366 # Always enable peer request logging. Requires --debug to display
2379 # Always enable peer request logging. Requires --debug to display
2367 # though.
2380 # though.
2368 overrides = {
2381 overrides = {
2369 (b'devel', b'debug.peer-request'): True,
2382 (b'devel', b'debug.peer-request'): True,
2370 }
2383 }
2371
2384
2372 with ui.configoverride(overrides):
2385 with ui.configoverride(overrides):
2373 peer = hg.peer(ui, {}, path)
2386 peer = hg.peer(ui, {}, path)
2374
2387
2375 local = peer.local() is not None
2388 local = peer.local() is not None
2376 canpush = peer.canpush()
2389 canpush = peer.canpush()
2377
2390
2378 ui.write(_(b'url: %s\n') % peer.url())
2391 ui.write(_(b'url: %s\n') % peer.url())
2379 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2392 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2380 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2393 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2381
2394
2382
2395
2383 @command(
2396 @command(
2384 b'debugpickmergetool',
2397 b'debugpickmergetool',
2385 [
2398 [
2386 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2399 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2387 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2400 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2388 ]
2401 ]
2389 + cmdutil.walkopts
2402 + cmdutil.walkopts
2390 + cmdutil.mergetoolopts,
2403 + cmdutil.mergetoolopts,
2391 _(b'[PATTERN]...'),
2404 _(b'[PATTERN]...'),
2392 inferrepo=True,
2405 inferrepo=True,
2393 )
2406 )
2394 def debugpickmergetool(ui, repo, *pats, **opts):
2407 def debugpickmergetool(ui, repo, *pats, **opts):
2395 """examine which merge tool is chosen for specified file
2408 """examine which merge tool is chosen for specified file
2396
2409
2397 As described in :hg:`help merge-tools`, Mercurial examines
2410 As described in :hg:`help merge-tools`, Mercurial examines
2398 configurations below in this order to decide which merge tool is
2411 configurations below in this order to decide which merge tool is
2399 chosen for specified file.
2412 chosen for specified file.
2400
2413
2401 1. ``--tool`` option
2414 1. ``--tool`` option
2402 2. ``HGMERGE`` environment variable
2415 2. ``HGMERGE`` environment variable
2403 3. configurations in ``merge-patterns`` section
2416 3. configurations in ``merge-patterns`` section
2404 4. configuration of ``ui.merge``
2417 4. configuration of ``ui.merge``
2405 5. configurations in ``merge-tools`` section
2418 5. configurations in ``merge-tools`` section
2406 6. ``hgmerge`` tool (for historical reason only)
2419 6. ``hgmerge`` tool (for historical reason only)
2407 7. default tool for fallback (``:merge`` or ``:prompt``)
2420 7. default tool for fallback (``:merge`` or ``:prompt``)
2408
2421
2409 This command writes out the examination result in the style below::
2422 This command writes out the examination result in the style below::
2410
2423
2411 FILE = MERGETOOL
2424 FILE = MERGETOOL
2412
2425
2413 By default, all files known in the first parent context of the
2426 By default, all files known in the first parent context of the
2414 working directory are examined. Use file patterns and/or -I/-X
2427 working directory are examined. Use file patterns and/or -I/-X
2415 options to limit target files. -r/--rev is also useful to examine
2428 options to limit target files. -r/--rev is also useful to examine
2416 files in another context without actually updating to it.
2429 files in another context without actually updating to it.
2417
2430
2418 With --debug, this command shows warning messages while matching
2431 With --debug, this command shows warning messages while matching
2419 against ``merge-patterns`` and related settings. It is recommended to
2432 against ``merge-patterns`` and related settings. It is recommended to
2420 use this option with explicit file patterns and/or -I/-X options,
2433 use this option with explicit file patterns and/or -I/-X options,
2421 because this option increases the amount of output per file according
2434 because this option increases the amount of output per file according
2422 to the configurations in hgrc.
2435 to the configurations in hgrc.
2423
2436
2424 With -v/--verbose, this command shows configurations below at
2437 With -v/--verbose, this command shows configurations below at
2425 first (only if specified).
2438 first (only if specified).
2426
2439
2427 - ``--tool`` option
2440 - ``--tool`` option
2428 - ``HGMERGE`` environment variable
2441 - ``HGMERGE`` environment variable
2429 - configuration of ``ui.merge``
2442 - configuration of ``ui.merge``
2430
2443
2431 If a merge tool is chosen before matching against
2444 If a merge tool is chosen before matching against
2432 ``merge-patterns``, this command can't show any helpful
2445 ``merge-patterns``, this command can't show any helpful
2433 information, even with --debug. In such a case, the information
2446 information, even with --debug. In such a case, the information
2434 above is useful for understanding why a merge tool was chosen.
2447 above is useful for understanding why a merge tool was chosen.
2435 """
2448 """
2436 opts = pycompat.byteskwargs(opts)
2449 opts = pycompat.byteskwargs(opts)
2437 overrides = {}
2450 overrides = {}
2438 if opts[b'tool']:
2451 if opts[b'tool']:
2439 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2452 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2440 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2453 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2441
2454
2442 with ui.configoverride(overrides, b'debugmergepatterns'):
2455 with ui.configoverride(overrides, b'debugmergepatterns'):
2443 hgmerge = encoding.environ.get(b"HGMERGE")
2456 hgmerge = encoding.environ.get(b"HGMERGE")
2444 if hgmerge is not None:
2457 if hgmerge is not None:
2445 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2458 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2446 uimerge = ui.config(b"ui", b"merge")
2459 uimerge = ui.config(b"ui", b"merge")
2447 if uimerge:
2460 if uimerge:
2448 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2461 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2449
2462
2450 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2463 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2451 m = scmutil.match(ctx, pats, opts)
2464 m = scmutil.match(ctx, pats, opts)
2452 changedelete = opts[b'changedelete']
2465 changedelete = opts[b'changedelete']
2453 for path in ctx.walk(m):
2466 for path in ctx.walk(m):
2454 fctx = ctx[path]
2467 fctx = ctx[path]
2455 try:
2468 try:
2456 if not ui.debugflag:
2469 if not ui.debugflag:
2457 ui.pushbuffer(error=True)
2470 ui.pushbuffer(error=True)
2458 tool, toolpath = filemerge._picktool(
2471 tool, toolpath = filemerge._picktool(
2459 repo,
2472 repo,
2460 ui,
2473 ui,
2461 path,
2474 path,
2462 fctx.isbinary(),
2475 fctx.isbinary(),
2463 b'l' in fctx.flags(),
2476 b'l' in fctx.flags(),
2464 changedelete,
2477 changedelete,
2465 )
2478 )
2466 finally:
2479 finally:
2467 if not ui.debugflag:
2480 if not ui.debugflag:
2468 ui.popbuffer()
2481 ui.popbuffer()
2469 ui.write(b'%s = %s\n' % (path, tool))
2482 ui.write(b'%s = %s\n' % (path, tool))
2470
2483
2471
2484
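The docstring above lists the precedence used to pick a merge tool (--tool first, then HGMERGE, merge-patterns, ui.merge, and so on down to the fallback). A hedged, first-configured-source-wins sketch over a subset of that list, with plain values standing in for the real configuration (the actual logic lives in filemerge._picktool)::

    def pick_tool(cli_tool=None, env=None, ui_merge=None, fallback=b':merge'):
        env = env or {}
        # sources in decreasing priority, mirroring the list above
        candidates = [
            (b'--tool', cli_tool),
            (b'HGMERGE', env.get(b'HGMERGE')),
            (b'ui.merge', ui_merge),
        ]
        for source, tool in candidates:
            if tool:
                return source, tool
        return b'fallback', fallback

    print(pick_tool(env={b'HGMERGE': b'vimdiff'}))  # (b'HGMERGE', b'vimdiff')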
2472 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2485 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2473 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2486 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2474 '''access the pushkey key/value protocol
2487 '''access the pushkey key/value protocol
2475
2488
2476 With two args, list the keys in the given namespace.
2489 With two args, list the keys in the given namespace.
2477
2490
2478 With five args, set a key to new if it currently is set to old.
2491 With five args, set a key to new if it currently is set to old.
2479 Reports success or failure.
2492 Reports success or failure.
2480 '''
2493 '''
2481
2494
2482 target = hg.peer(ui, {}, repopath)
2495 target = hg.peer(ui, {}, repopath)
2483 if keyinfo:
2496 if keyinfo:
2484 key, old, new = keyinfo
2497 key, old, new = keyinfo
2485 with target.commandexecutor() as e:
2498 with target.commandexecutor() as e:
2486 r = e.callcommand(
2499 r = e.callcommand(
2487 b'pushkey',
2500 b'pushkey',
2488 {
2501 {
2489 b'namespace': namespace,
2502 b'namespace': namespace,
2490 b'key': key,
2503 b'key': key,
2491 b'old': old,
2504 b'old': old,
2492 b'new': new,
2505 b'new': new,
2493 },
2506 },
2494 ).result()
2507 ).result()
2495
2508
2496 ui.status(pycompat.bytestr(r) + b'\n')
2509 ui.status(pycompat.bytestr(r) + b'\n')
2497 return not r
2510 return not r
2498 else:
2511 else:
2499 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2512 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2500 ui.write(
2513 ui.write(
2501 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2514 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2502 )
2515 )
2503
2516
2504
2517
2505 @command(b'debugpvec', [], _(b'A B'))
2518 @command(b'debugpvec', [], _(b'A B'))
2506 def debugpvec(ui, repo, a, b=None):
2519 def debugpvec(ui, repo, a, b=None):
2507 ca = scmutil.revsingle(repo, a)
2520 ca = scmutil.revsingle(repo, a)
2508 cb = scmutil.revsingle(repo, b)
2521 cb = scmutil.revsingle(repo, b)
2509 pa = pvec.ctxpvec(ca)
2522 pa = pvec.ctxpvec(ca)
2510 pb = pvec.ctxpvec(cb)
2523 pb = pvec.ctxpvec(cb)
2511 if pa == pb:
2524 if pa == pb:
2512 rel = b"="
2525 rel = b"="
2513 elif pa > pb:
2526 elif pa > pb:
2514 rel = b">"
2527 rel = b">"
2515 elif pa < pb:
2528 elif pa < pb:
2516 rel = b"<"
2529 rel = b"<"
2517 elif pa | pb:
2530 elif pa | pb:
2518 rel = b"|"
2531 rel = b"|"
2519 ui.write(_(b"a: %s\n") % pa)
2532 ui.write(_(b"a: %s\n") % pa)
2520 ui.write(_(b"b: %s\n") % pb)
2533 ui.write(_(b"b: %s\n") % pb)
2521 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2534 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2522 ui.write(
2535 ui.write(
2523 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2536 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2524 % (
2537 % (
2525 abs(pa._depth - pb._depth),
2538 abs(pa._depth - pb._depth),
2526 pvec._hamming(pa._vec, pb._vec),
2539 pvec._hamming(pa._vec, pb._vec),
2527 pa.distance(pb),
2540 pa.distance(pb),
2528 rel,
2541 rel,
2529 )
2542 )
2530 )
2543 )
2531
2544
2532
2545
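Among other figures, debugpvec prints the hamming distance between the two parent vectors. As a reminder of what that measures, the number of differing bit positions, here is a minimal sketch over plain integers (the real pvec vectors are fixed-width bit strings)::

    def hamming(a, b):
        # count the bit positions where a and b disagree
        return bin(a ^ b).count('1')

    print(hamming(0b10110, 0b11100))  # 2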
2533 @command(
2546 @command(
2534 b'debugrebuilddirstate|debugrebuildstate',
2547 b'debugrebuilddirstate|debugrebuildstate',
2535 [
2548 [
2536 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2549 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2537 (
2550 (
2538 b'',
2551 b'',
2539 b'minimal',
2552 b'minimal',
2540 None,
2553 None,
2541 _(
2554 _(
2542 b'only rebuild files that are inconsistent with '
2555 b'only rebuild files that are inconsistent with '
2543 b'the working copy parent'
2556 b'the working copy parent'
2544 ),
2557 ),
2545 ),
2558 ),
2546 ],
2559 ],
2547 _(b'[-r REV]'),
2560 _(b'[-r REV]'),
2548 )
2561 )
2549 def debugrebuilddirstate(ui, repo, rev, **opts):
2562 def debugrebuilddirstate(ui, repo, rev, **opts):
2550 """rebuild the dirstate as it would look like for the given revision
2563 """rebuild the dirstate as it would look like for the given revision
2551
2564
2552 If no revision is specified the first current parent will be used.
2565 If no revision is specified the first current parent will be used.
2553
2566
2554 The dirstate will be set to the files of the given revision.
2567 The dirstate will be set to the files of the given revision.
2555 The actual working directory content or existing dirstate
2568 The actual working directory content or existing dirstate
2556 information such as adds or removes is not considered.
2569 information such as adds or removes is not considered.
2557
2570
2558 ``minimal`` will only rebuild the dirstate status for files that claim to be
2571 ``minimal`` will only rebuild the dirstate status for files that claim to be
2559 tracked but are not in the parent manifest, or that exist in the parent
2572 tracked but are not in the parent manifest, or that exist in the parent
2560 manifest but are not in the dirstate. It will not change adds, removes, or
2573 manifest but are not in the dirstate. It will not change adds, removes, or
2561 modified files that are in the working copy parent.
2574 modified files that are in the working copy parent.
2562
2575
2563 One use of this command is to make the next :hg:`status` invocation
2576 One use of this command is to make the next :hg:`status` invocation
2564 check the actual file content.
2577 check the actual file content.
2565 """
2578 """
2566 ctx = scmutil.revsingle(repo, rev)
2579 ctx = scmutil.revsingle(repo, rev)
2567 with repo.wlock():
2580 with repo.wlock():
2568 dirstate = repo.dirstate
2581 dirstate = repo.dirstate
2569 changedfiles = None
2582 changedfiles = None
2570 # See command doc for what minimal does.
2583 # See command doc for what minimal does.
2571 if opts.get('minimal'):
2584 if opts.get('minimal'):
2572 manifestfiles = set(ctx.manifest().keys())
2585 manifestfiles = set(ctx.manifest().keys())
2573 dirstatefiles = set(dirstate)
2586 dirstatefiles = set(dirstate)
2574 manifestonly = manifestfiles - dirstatefiles
2587 manifestonly = manifestfiles - dirstatefiles
2575 dsonly = dirstatefiles - manifestfiles
2588 dsonly = dirstatefiles - manifestfiles
2576 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2589 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2577 changedfiles = manifestonly | dsnotadded
2590 changedfiles = manifestonly | dsnotadded
2578
2591
2579 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2592 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2580
2593
2581
2594
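With --minimal, the files handed to dirstate.rebuild() are those present in the manifest but missing from the dirstate, plus dirstate entries that are neither in the manifest nor marked as added. The same set arithmetic in isolation, with plain containers standing in for the manifest and dirstate::

    manifestfiles = {b'a.txt', b'b.txt', b'c.txt'}
    dirstate = {b'b.txt': b'n', b'c.txt': b'r', b'new.txt': b'a', b'stray.txt': b'n'}

    manifestonly = manifestfiles - set(dirstate)             # {b'a.txt'}
    dsonly = set(dirstate) - manifestfiles                   # new.txt, stray.txt
    dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}  # {b'stray.txt'}
    changedfiles = manifestonly | dsnotadded
    print(sorted(changedfiles))                              # [b'a.txt', b'stray.txt']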
2582 @command(b'debugrebuildfncache', [], b'')
2595 @command(b'debugrebuildfncache', [], b'')
2583 def debugrebuildfncache(ui, repo):
2596 def debugrebuildfncache(ui, repo):
2584 """rebuild the fncache file"""
2597 """rebuild the fncache file"""
2585 repair.rebuildfncache(ui, repo)
2598 repair.rebuildfncache(ui, repo)
2586
2599
2587
2600
2588 @command(
2601 @command(
2589 b'debugrename',
2602 b'debugrename',
2590 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2603 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2591 _(b'[-r REV] [FILE]...'),
2604 _(b'[-r REV] [FILE]...'),
2592 )
2605 )
2593 def debugrename(ui, repo, *pats, **opts):
2606 def debugrename(ui, repo, *pats, **opts):
2594 """dump rename information"""
2607 """dump rename information"""
2595
2608
2596 opts = pycompat.byteskwargs(opts)
2609 opts = pycompat.byteskwargs(opts)
2597 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2610 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2598 m = scmutil.match(ctx, pats, opts)
2611 m = scmutil.match(ctx, pats, opts)
2599 for abs in ctx.walk(m):
2612 for abs in ctx.walk(m):
2600 fctx = ctx[abs]
2613 fctx = ctx[abs]
2601 o = fctx.filelog().renamed(fctx.filenode())
2614 o = fctx.filelog().renamed(fctx.filenode())
2602 rel = repo.pathto(abs)
2615 rel = repo.pathto(abs)
2603 if o:
2616 if o:
2604 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2617 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2605 else:
2618 else:
2606 ui.write(_(b"%s not renamed\n") % rel)
2619 ui.write(_(b"%s not renamed\n") % rel)
2607
2620
2608
2621
2609 @command(
2622 @command(
2610 b'debugrevlog',
2623 b'debugrevlog',
2611 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2624 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2612 _(b'-c|-m|FILE'),
2625 _(b'-c|-m|FILE'),
2613 optionalrepo=True,
2626 optionalrepo=True,
2614 )
2627 )
2615 def debugrevlog(ui, repo, file_=None, **opts):
2628 def debugrevlog(ui, repo, file_=None, **opts):
2616 """show data and statistics about a revlog"""
2629 """show data and statistics about a revlog"""
2617 opts = pycompat.byteskwargs(opts)
2630 opts = pycompat.byteskwargs(opts)
2618 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2631 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2619
2632
2620 if opts.get(b"dump"):
2633 if opts.get(b"dump"):
2621 numrevs = len(r)
2634 numrevs = len(r)
2622 ui.write(
2635 ui.write(
2623 (
2636 (
2624 b"# rev p1rev p2rev start end deltastart base p1 p2"
2637 b"# rev p1rev p2rev start end deltastart base p1 p2"
2625 b" rawsize totalsize compression heads chainlen\n"
2638 b" rawsize totalsize compression heads chainlen\n"
2626 )
2639 )
2627 )
2640 )
2628 ts = 0
2641 ts = 0
2629 heads = set()
2642 heads = set()
2630
2643
2631 for rev in pycompat.xrange(numrevs):
2644 for rev in pycompat.xrange(numrevs):
2632 dbase = r.deltaparent(rev)
2645 dbase = r.deltaparent(rev)
2633 if dbase == -1:
2646 if dbase == -1:
2634 dbase = rev
2647 dbase = rev
2635 cbase = r.chainbase(rev)
2648 cbase = r.chainbase(rev)
2636 clen = r.chainlen(rev)
2649 clen = r.chainlen(rev)
2637 p1, p2 = r.parentrevs(rev)
2650 p1, p2 = r.parentrevs(rev)
2638 rs = r.rawsize(rev)
2651 rs = r.rawsize(rev)
2639 ts = ts + rs
2652 ts = ts + rs
2640 heads -= set(r.parentrevs(rev))
2653 heads -= set(r.parentrevs(rev))
2641 heads.add(rev)
2654 heads.add(rev)
2642 try:
2655 try:
2643 compression = ts / r.end(rev)
2656 compression = ts / r.end(rev)
2644 except ZeroDivisionError:
2657 except ZeroDivisionError:
2645 compression = 0
2658 compression = 0
2646 ui.write(
2659 ui.write(
2647 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2660 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2648 b"%11d %5d %8d\n"
2661 b"%11d %5d %8d\n"
2649 % (
2662 % (
2650 rev,
2663 rev,
2651 p1,
2664 p1,
2652 p2,
2665 p2,
2653 r.start(rev),
2666 r.start(rev),
2654 r.end(rev),
2667 r.end(rev),
2655 r.start(dbase),
2668 r.start(dbase),
2656 r.start(cbase),
2669 r.start(cbase),
2657 r.start(p1),
2670 r.start(p1),
2658 r.start(p2),
2671 r.start(p2),
2659 rs,
2672 rs,
2660 ts,
2673 ts,
2661 compression,
2674 compression,
2662 len(heads),
2675 len(heads),
2663 clen,
2676 clen,
2664 )
2677 )
2665 )
2678 )
2666 return 0
2679 return 0
2667
2680
2668 v = r.version
2681 v = r.version
2669 format = v & 0xFFFF
2682 format = v & 0xFFFF
2670 flags = []
2683 flags = []
2671 gdelta = False
2684 gdelta = False
2672 if v & revlog.FLAG_INLINE_DATA:
2685 if v & revlog.FLAG_INLINE_DATA:
2673 flags.append(b'inline')
2686 flags.append(b'inline')
2674 if v & revlog.FLAG_GENERALDELTA:
2687 if v & revlog.FLAG_GENERALDELTA:
2675 gdelta = True
2688 gdelta = True
2676 flags.append(b'generaldelta')
2689 flags.append(b'generaldelta')
2677 if not flags:
2690 if not flags:
2678 flags = [b'(none)']
2691 flags = [b'(none)']
2679
2692
2680 ### tracks merge vs single parent
2693 ### tracks merge vs single parent
2681 nummerges = 0
2694 nummerges = 0
2682
2695
2683 ### tracks the ways the "delta" is built
2696 ### tracks the ways the "delta" is built
2684 # nodelta
2697 # nodelta
2685 numempty = 0
2698 numempty = 0
2686 numemptytext = 0
2699 numemptytext = 0
2687 numemptydelta = 0
2700 numemptydelta = 0
2688 # full file content
2701 # full file content
2689 numfull = 0
2702 numfull = 0
2690 # intermediate snapshot against a prior snapshot
2703 # intermediate snapshot against a prior snapshot
2691 numsemi = 0
2704 numsemi = 0
2692 # snapshot count per depth
2705 # snapshot count per depth
2693 numsnapdepth = collections.defaultdict(lambda: 0)
2706 numsnapdepth = collections.defaultdict(lambda: 0)
2694 # delta against previous revision
2707 # delta against previous revision
2695 numprev = 0
2708 numprev = 0
2696 # delta against first or second parent (not prev)
2709 # delta against first or second parent (not prev)
2697 nump1 = 0
2710 nump1 = 0
2698 nump2 = 0
2711 nump2 = 0
2699 # delta against neither prev nor parents
2712 # delta against neither prev nor parents
2700 numother = 0
2713 numother = 0
2701 # delta against prev that are also first or second parent
2714 # delta against prev that are also first or second parent
2702 # (details of `numprev`)
2715 # (details of `numprev`)
2703 nump1prev = 0
2716 nump1prev = 0
2704 nump2prev = 0
2717 nump2prev = 0
2705
2718
2706 # data about the delta chain of each rev
2719 # data about the delta chain of each rev
2707 chainlengths = []
2720 chainlengths = []
2708 chainbases = []
2721 chainbases = []
2709 chainspans = []
2722 chainspans = []
2710
2723
2711 # data about each revision
2724 # data about each revision
2712 datasize = [None, 0, 0]
2725 datasize = [None, 0, 0]
2713 fullsize = [None, 0, 0]
2726 fullsize = [None, 0, 0]
2714 semisize = [None, 0, 0]
2727 semisize = [None, 0, 0]
2715 # snapshot count per depth
2728 # snapshot count per depth
2716 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2729 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2717 deltasize = [None, 0, 0]
2730 deltasize = [None, 0, 0]
2718 chunktypecounts = {}
2731 chunktypecounts = {}
2719 chunktypesizes = {}
2732 chunktypesizes = {}
2720
2733
2721 def addsize(size, l):
2734 def addsize(size, l):
2722 if l[0] is None or size < l[0]:
2735 if l[0] is None or size < l[0]:
2723 l[0] = size
2736 l[0] = size
2724 if size > l[1]:
2737 if size > l[1]:
2725 l[1] = size
2738 l[1] = size
2726 l[2] += size
2739 l[2] += size
2727
2740
2728 numrevs = len(r)
2741 numrevs = len(r)
2729 for rev in pycompat.xrange(numrevs):
2742 for rev in pycompat.xrange(numrevs):
2730 p1, p2 = r.parentrevs(rev)
2743 p1, p2 = r.parentrevs(rev)
2731 delta = r.deltaparent(rev)
2744 delta = r.deltaparent(rev)
2732 if format > 0:
2745 if format > 0:
2733 addsize(r.rawsize(rev), datasize)
2746 addsize(r.rawsize(rev), datasize)
2734 if p2 != nullrev:
2747 if p2 != nullrev:
2735 nummerges += 1
2748 nummerges += 1
2736 size = r.length(rev)
2749 size = r.length(rev)
2737 if delta == nullrev:
2750 if delta == nullrev:
2738 chainlengths.append(0)
2751 chainlengths.append(0)
2739 chainbases.append(r.start(rev))
2752 chainbases.append(r.start(rev))
2740 chainspans.append(size)
2753 chainspans.append(size)
2741 if size == 0:
2754 if size == 0:
2742 numempty += 1
2755 numempty += 1
2743 numemptytext += 1
2756 numemptytext += 1
2744 else:
2757 else:
2745 numfull += 1
2758 numfull += 1
2746 numsnapdepth[0] += 1
2759 numsnapdepth[0] += 1
2747 addsize(size, fullsize)
2760 addsize(size, fullsize)
2748 addsize(size, snapsizedepth[0])
2761 addsize(size, snapsizedepth[0])
2749 else:
2762 else:
2750 chainlengths.append(chainlengths[delta] + 1)
2763 chainlengths.append(chainlengths[delta] + 1)
2751 baseaddr = chainbases[delta]
2764 baseaddr = chainbases[delta]
2752 revaddr = r.start(rev)
2765 revaddr = r.start(rev)
2753 chainbases.append(baseaddr)
2766 chainbases.append(baseaddr)
2754 chainspans.append((revaddr - baseaddr) + size)
2767 chainspans.append((revaddr - baseaddr) + size)
2755 if size == 0:
2768 if size == 0:
2756 numempty += 1
2769 numempty += 1
2757 numemptydelta += 1
2770 numemptydelta += 1
2758 elif r.issnapshot(rev):
2771 elif r.issnapshot(rev):
2759 addsize(size, semisize)
2772 addsize(size, semisize)
2760 numsemi += 1
2773 numsemi += 1
2761 depth = r.snapshotdepth(rev)
2774 depth = r.snapshotdepth(rev)
2762 numsnapdepth[depth] += 1
2775 numsnapdepth[depth] += 1
2763 addsize(size, snapsizedepth[depth])
2776 addsize(size, snapsizedepth[depth])
2764 else:
2777 else:
2765 addsize(size, deltasize)
2778 addsize(size, deltasize)
2766 if delta == rev - 1:
2779 if delta == rev - 1:
2767 numprev += 1
2780 numprev += 1
2768 if delta == p1:
2781 if delta == p1:
2769 nump1prev += 1
2782 nump1prev += 1
2770 elif delta == p2:
2783 elif delta == p2:
2771 nump2prev += 1
2784 nump2prev += 1
2772 elif delta == p1:
2785 elif delta == p1:
2773 nump1 += 1
2786 nump1 += 1
2774 elif delta == p2:
2787 elif delta == p2:
2775 nump2 += 1
2788 nump2 += 1
2776 elif delta != nullrev:
2789 elif delta != nullrev:
2777 numother += 1
2790 numother += 1
2778
2791
2779 # Obtain data on the raw chunks in the revlog.
2792 # Obtain data on the raw chunks in the revlog.
2780 if util.safehasattr(r, b'_getsegmentforrevs'):
2793 if util.safehasattr(r, b'_getsegmentforrevs'):
2781 segment = r._getsegmentforrevs(rev, rev)[1]
2794 segment = r._getsegmentforrevs(rev, rev)[1]
2782 else:
2795 else:
2783 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2796 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2784 if segment:
2797 if segment:
2785 chunktype = bytes(segment[0:1])
2798 chunktype = bytes(segment[0:1])
2786 else:
2799 else:
2787 chunktype = b'empty'
2800 chunktype = b'empty'
2788
2801
2789 if chunktype not in chunktypecounts:
2802 if chunktype not in chunktypecounts:
2790 chunktypecounts[chunktype] = 0
2803 chunktypecounts[chunktype] = 0
2791 chunktypesizes[chunktype] = 0
2804 chunktypesizes[chunktype] = 0
2792
2805
2793 chunktypecounts[chunktype] += 1
2806 chunktypecounts[chunktype] += 1
2794 chunktypesizes[chunktype] += size
2807 chunktypesizes[chunktype] += size
2795
2808
2796 # Adjust size min value for empty cases
2809 # Adjust size min value for empty cases
2797 for size in (datasize, fullsize, semisize, deltasize):
2810 for size in (datasize, fullsize, semisize, deltasize):
2798 if size[0] is None:
2811 if size[0] is None:
2799 size[0] = 0
2812 size[0] = 0
2800
2813
2801 numdeltas = numrevs - numfull - numempty - numsemi
2814 numdeltas = numrevs - numfull - numempty - numsemi
2802 numoprev = numprev - nump1prev - nump2prev
2815 numoprev = numprev - nump1prev - nump2prev
2803 totalrawsize = datasize[2]
2816 totalrawsize = datasize[2]
2804 datasize[2] /= numrevs
2817 datasize[2] /= numrevs
2805 fulltotal = fullsize[2]
2818 fulltotal = fullsize[2]
2806 if numfull == 0:
2819 if numfull == 0:
2807 fullsize[2] = 0
2820 fullsize[2] = 0
2808 else:
2821 else:
2809 fullsize[2] /= numfull
2822 fullsize[2] /= numfull
2810 semitotal = semisize[2]
2823 semitotal = semisize[2]
2811 snaptotal = {}
2824 snaptotal = {}
2812 if numsemi > 0:
2825 if numsemi > 0:
2813 semisize[2] /= numsemi
2826 semisize[2] /= numsemi
2814 for depth in snapsizedepth:
2827 for depth in snapsizedepth:
2815 snaptotal[depth] = snapsizedepth[depth][2]
2828 snaptotal[depth] = snapsizedepth[depth][2]
2816 snapsizedepth[depth][2] /= numsnapdepth[depth]
2829 snapsizedepth[depth][2] /= numsnapdepth[depth]
2817
2830
2818 deltatotal = deltasize[2]
2831 deltatotal = deltasize[2]
2819 if numdeltas > 0:
2832 if numdeltas > 0:
2820 deltasize[2] /= numdeltas
2833 deltasize[2] /= numdeltas
2821 totalsize = fulltotal + semitotal + deltatotal
2834 totalsize = fulltotal + semitotal + deltatotal
2822 avgchainlen = sum(chainlengths) / numrevs
2835 avgchainlen = sum(chainlengths) / numrevs
2823 maxchainlen = max(chainlengths)
2836 maxchainlen = max(chainlengths)
2824 maxchainspan = max(chainspans)
2837 maxchainspan = max(chainspans)
2825 compratio = 1
2838 compratio = 1
2826 if totalsize:
2839 if totalsize:
2827 compratio = totalrawsize / totalsize
2840 compratio = totalrawsize / totalsize
2828
2841
2829 basedfmtstr = b'%%%dd\n'
2842 basedfmtstr = b'%%%dd\n'
2830 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2843 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2831
2844
2832 def dfmtstr(max):
2845 def dfmtstr(max):
2833 return basedfmtstr % len(str(max))
2846 return basedfmtstr % len(str(max))
2834
2847
2835 def pcfmtstr(max, padding=0):
2848 def pcfmtstr(max, padding=0):
2836 return basepcfmtstr % (len(str(max)), b' ' * padding)
2849 return basepcfmtstr % (len(str(max)), b' ' * padding)
2837
2850
2838 def pcfmt(value, total):
2851 def pcfmt(value, total):
2839 if total:
2852 if total:
2840 return (value, 100 * float(value) / total)
2853 return (value, 100 * float(value) / total)
2841 else:
2854 else:
2842 return value, 100.0
2855 return value, 100.0
2843
2856
2844 ui.writenoi18n(b'format : %d\n' % format)
2857 ui.writenoi18n(b'format : %d\n' % format)
2845 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2858 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2846
2859
2847 ui.write(b'\n')
2860 ui.write(b'\n')
2848 fmt = pcfmtstr(totalsize)
2861 fmt = pcfmtstr(totalsize)
2849 fmt2 = dfmtstr(totalsize)
2862 fmt2 = dfmtstr(totalsize)
2850 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2863 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2851 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2864 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2852 ui.writenoi18n(
2865 ui.writenoi18n(
2853 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2866 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2854 )
2867 )
2855 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2868 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2856 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2869 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2857 ui.writenoi18n(
2870 ui.writenoi18n(
2858 b' text : '
2871 b' text : '
2859 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2872 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2860 )
2873 )
2861 ui.writenoi18n(
2874 ui.writenoi18n(
2862 b' delta : '
2875 b' delta : '
2863 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2876 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2864 )
2877 )
2865 ui.writenoi18n(
2878 ui.writenoi18n(
2866 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2879 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2867 )
2880 )
2868 for depth in sorted(numsnapdepth):
2881 for depth in sorted(numsnapdepth):
2869 ui.write(
2882 ui.write(
2870 (b' lvl-%-3d : ' % depth)
2883 (b' lvl-%-3d : ' % depth)
2871 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2884 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2872 )
2885 )
2873 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2886 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2874 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2887 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2875 ui.writenoi18n(
2888 ui.writenoi18n(
2876 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2889 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2877 )
2890 )
2878 for depth in sorted(numsnapdepth):
2891 for depth in sorted(numsnapdepth):
2879 ui.write(
2892 ui.write(
2880 (b' lvl-%-3d : ' % depth)
2893 (b' lvl-%-3d : ' % depth)
2881 + fmt % pcfmt(snaptotal[depth], totalsize)
2894 + fmt % pcfmt(snaptotal[depth], totalsize)
2882 )
2895 )
2883 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2896 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2884
2897
2885 def fmtchunktype(chunktype):
2898 def fmtchunktype(chunktype):
2886 if chunktype == b'empty':
2899 if chunktype == b'empty':
2887 return b' %s : ' % chunktype
2900 return b' %s : ' % chunktype
2888 elif chunktype in pycompat.bytestr(string.ascii_letters):
2901 elif chunktype in pycompat.bytestr(string.ascii_letters):
2889 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2902 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2890 else:
2903 else:
2891 return b' 0x%s : ' % hex(chunktype)
2904 return b' 0x%s : ' % hex(chunktype)
2892
2905
    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )


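# NOTE (editor's illustration, not part of Mercurial): the statistics block
# above formats counts with the pcfmt()/pcfmtstr() helpers defined earlier in
# this module (outside this hunk).  The standalone sketch below only
# approximates the idea -- pairing a count with its percentage of a total so
# it can be fed to a ``%d %.2f%%``-style format; the names and widths here
# are assumptions, not the real helpers.
def _example_pcfmt(value, total):
    """Return ``(value, percentage-of-total)``, tolerating a zero total."""
    if total:
        return value, 100 * float(value) / total
    return value, 100.0


def _example_pcfmtstr(total):
    """Build a ``%<width>d %7.2f%%`` format string sized to fit ``total``."""
    return b'%%%dd %%7.2f%%%%\n' % len(b'%d' % total)


# usage sketch: _example_pcfmtstr(1234) % _example_pcfmt(200, 1234)

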
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )


@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)


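# NOTE (editor's illustration, not part of Mercurial): debugrevspec above
# threads the parse tree through a list of ``(name, transform)`` stages and
# prints the intermediate tree for the stages the user selected.  The generic
# sketch below shows the same fold-and-report pattern on plain values; the
# stage names and transforms are invented for the example.
def _example_run_stages(value, stages, showalways=(), showchanged=()):
    """Apply ``stages`` in order, reporting selected intermediate values."""
    printed = None
    for name, transform in stages:
        value = transform(value)
        if name in showalways or (name in showchanged and value != printed):
            print('* %s: %r' % (name, value))
            printed = value
    return value


# usage sketch:
#   _example_run_stages(
#       ' A+B ',
#       [('parsed', str.strip), ('lowered', str.lower)],
#       showalways={'lowered'},
#   )

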
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround for the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()


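# NOTE (editor's illustration, not part of Mercurial): the --logiofd handling
# above reopens the descriptor in plain write mode when unbuffered append
# mode is rejected for a pipe (errno.ESPIPE, "Illegal seek").  A minimal
# standalone sketch of that fallback; the helper name is invented, and
# ``os``/``errno`` are already imported at the top of this module.
def _example_open_log_fd(fd):
    """Wrap ``fd`` in an unbuffered binary file, falling back for pipes."""
    try:
        return os.fdopen(fd, 'ab', 0)
    except OSError as exc:
        if exc.errno != errno.ESPIPE:
            raise
        # a pipe cannot seek, so append mode fails; plain write mode works
        return os.fdopen(fd, 'wb', 0)


# usage sketch:
#   rfd, wfd = os.pipe()
#   logfh = _example_open_log_fd(wfd)
#   logfh.write(b'hello\n')

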
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)


@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))


@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()


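# NOTE (editor's illustration, not part of Mercurial): debugssl above opens a
# verification-disabled TLS connection purely to obtain the peer's DER
# certificate before handing it to the Windows chain-building API.  The
# sketch below fetches a peer certificate the same way with the newer
# ssl.SSLContext API; it is an editor-added example, not the command's code.
def _example_fetch_peer_cert(host, port=443, timeout=10):
    """Return the server certificate in DER form, without verifying it."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    with socket.create_connection((host, port), timeout=timeout) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            return tls.getpeercert(binary_form=True)


# usage sketch: der = _example_fetch_peer_cert('example.com')
# (``socket`` and ``ssl`` are already imported at the top of this module.)

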
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % k)
        ui.writenoi18n(b' source %s\n' % v[0])
        ui.writenoi18n(b' revision %s\n' % v[1])


@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only, unless the closest
    successors sets are requested (--closest).

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')


@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for r in repo:
        node = repo[r].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        tagsnodedisplay = hex(tagsnode) if tagsnode else 'missing/invalid'
        ui.write(b'%s %s %s\n' % (r, hex(node), tagsnodedisplay))


@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()


@command(
    b'debuguigetpass',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    ui.writenoi18n(b'response: %s\n' % r)


@command(
    b'debuguiprompt',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % r)


@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)


@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlogs but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    """
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimize, backup=backup, **opts
    )


@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(repo.pathto(abs)) for abs in items]),
    )
    for abs in items:
        line = fmt % (
            abs,
            f(repo.pathto(abs)),
            m.exact(abs) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())


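# NOTE (editor's illustration, not part of Mercurial): debugwalk above builds
# its output format in two steps -- the ``%%-%ds`` placeholders are first
# filled with the widest absolute and relative path lengths, yielding a
# left-aligned fixed-width format string.  A tiny standalone demo of the same
# two-step %-formatting trick (names invented for the example):
def _example_aligned_rows(pairs):
    """Render ``(name, value)`` pairs as two left-aligned columns."""
    fmt = '%%-%ds  %%-%ds' % (
        max(len(name) for name, _ in pairs),
        max(len(value) for _, value in pairs),
    )
    return [(fmt % pair).rstrip() for pair in pairs]


# usage sketch: _example_aligned_rows([('a.txt', 'exact'), ('dir/b.txt', '')])

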
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        if entry.get(b'divergentnodes'):
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in entry[b'divergentnodes']
                )
                + b' '
            )
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )


@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in pycompat.iteritems(opts):
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)


def _parsewirelangblocks(fh):
    activeaction = None
    blocklines = []
    lastindent = 0

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            lastindent = 0
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(line) - len(line.lstrip())

        # If this line is indented more than the last line, concatenate it.
        if indent > lastindent and blocklines:
            blocklines[-1] += line.lstrip()
        else:
            blocklines.append(line)
            lastindent = indent

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines


@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
3745 def debugwireproto(ui, repo, path=None, **opts):
3758 def debugwireproto(ui, repo, path=None, **opts):
3746 """send wire protocol commands to a server
3759 """send wire protocol commands to a server
3747
3760
3748 This command can be used to issue wire protocol commands to remote
3761 This command can be used to issue wire protocol commands to remote
3749 peers and to debug the raw data being exchanged.
3762 peers and to debug the raw data being exchanged.
3750
3763
3751 ``--localssh`` will start an SSH server against the current repository
3764 ``--localssh`` will start an SSH server against the current repository
3752 and connect to that. By default, the connection will perform a handshake
3765 and connect to that. By default, the connection will perform a handshake
3753 and establish an appropriate peer instance.
3766 and establish an appropriate peer instance.
3754
3767
3755 ``--peer`` can be used to bypass the handshake protocol and construct a
3768 ``--peer`` can be used to bypass the handshake protocol and construct a
3756 peer instance using the specified class type. Valid values are ``raw``,
3769 peer instance using the specified class type. Valid values are ``raw``,
3757 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3770 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3758 raw data payloads and don't support higher-level command actions.
3771 raw data payloads and don't support higher-level command actions.
3759
3772
3760 ``--noreadstderr`` can be used to disable automatic reading from stderr
3773 ``--noreadstderr`` can be used to disable automatic reading from stderr
3761 of the peer (for SSH connections only). Disabling automatic reading of
3774 of the peer (for SSH connections only). Disabling automatic reading of
3762 stderr is useful for making output more deterministic.
3775 stderr is useful for making output more deterministic.
3763
3776
3764 Commands are issued via a mini language which is specified via stdin.
3777 Commands are issued via a mini language which is specified via stdin.
3765 The language consists of individual actions to perform. An action is
3778 The language consists of individual actions to perform. An action is
3766 defined by a block. A block is defined as a line with no leading
3779 defined by a block. A block is defined as a line with no leading
3767 space followed by 0 or more lines with leading space. Blocks are
3780 space followed by 0 or more lines with leading space. Blocks are
3768 effectively a high-level command with additional metadata.
3781 effectively a high-level command with additional metadata.
3769
3782
3770 Lines beginning with ``#`` are ignored.
3783 Lines beginning with ``#`` are ignored.
3771
3784
3772 The following sections denote available actions.
3785 The following sections denote available actions.
3773
3786
3774 raw
3787 raw
3775 ---
3788 ---
3776
3789
3777 Send raw data to the server.
3790 Send raw data to the server.
3778
3791
3779 The block payload contains the raw data to send as one atomic send
3792 The block payload contains the raw data to send as one atomic send
3780 operation. The data may not actually be delivered in a single system
3793 operation. The data may not actually be delivered in a single system
3781 call: it depends on the abilities of the transport being used.
3794 call: it depends on the abilities of the transport being used.
3782
3795
3783 Each line in the block is de-indented and concatenated. Then, that
3796 Each line in the block is de-indented and concatenated. Then, that
3784 value is evaluated as a Python b'' literal. This allows the use of
3797 value is evaluated as a Python b'' literal. This allows the use of
3785 backslash escaping, etc.
3798 backslash escaping, etc.
3786
3799
3787 raw+
3800 raw+
3788 ----
3801 ----
3789
3802
3790 Behaves like ``raw`` except flushes output afterwards.
3803 Behaves like ``raw`` except flushes output afterwards.
3791
3804
3792 command <X>
3805 command <X>
3793 -----------
3806 -----------
3794
3807
3795 Send a request to run a named command, whose name follows the ``command``
3808 Send a request to run a named command, whose name follows the ``command``
3796 string.
3809 string.
3797
3810
3798 Arguments to the command are defined as lines in this block. The format of
3811 Arguments to the command are defined as lines in this block. The format of
3799 each line is ``<key> <value>``. e.g.::
3812 each line is ``<key> <value>``. e.g.::
3800
3813
3801 command listkeys
3814 command listkeys
3802 namespace bookmarks
3815 namespace bookmarks
3803
3816
3804 If the value begins with ``eval:``, it will be interpreted as a Python
3817 If the value begins with ``eval:``, it will be interpreted as a Python
3805 literal expression. Otherwise values are interpreted as Python b'' literals.
3818 literal expression. Otherwise values are interpreted as Python b'' literals.
3806 This allows sending complex types and encoding special byte sequences via
3819 This allows sending complex types and encoding special byte sequences via
3807 backslash escaping.
3820 backslash escaping.
3808
3821
3809 The following arguments have special meaning:
3822 The following arguments have special meaning:
3810
3823
3811 ``PUSHFILE``
3824 ``PUSHFILE``
3812 When defined, the *push* mechanism of the peer will be used instead
3825 When defined, the *push* mechanism of the peer will be used instead
3813 of the static request-response mechanism and the content of the
3826 of the static request-response mechanism and the content of the
3814 file specified in the value of this argument will be sent as the
3827 file specified in the value of this argument will be sent as the
3815 command payload.
3828 command payload.
3816
3829
3817 This can be used to submit a local bundle file to the remote.
3830 This can be used to submit a local bundle file to the remote.
3818
3831
3819 batchbegin
3832 batchbegin
3820 ----------
3833 ----------
3821
3834
3822 Instruct the peer to begin a batched send.
3835 Instruct the peer to begin a batched send.
3823
3836
3824 All ``command`` blocks are queued for execution until the next
3837 All ``command`` blocks are queued for execution until the next
3825 ``batchsubmit`` block.
3838 ``batchsubmit`` block.
3826
3839
3827 batchsubmit
3840 batchsubmit
3828 -----------
3841 -----------
3829
3842
3830 Submit previously queued ``command`` blocks as a batch request.
3843 Submit previously queued ``command`` blocks as a batch request.
3831
3844
3832 This action MUST be paired with a ``batchbegin`` action.
3845 This action MUST be paired with a ``batchbegin`` action.
3833
3846
3834 httprequest <method> <path>
3847 httprequest <method> <path>
3835 ---------------------------
3848 ---------------------------
3836
3849
3837 (HTTP peer only)
3850 (HTTP peer only)
3838
3851
3839 Send an HTTP request to the peer.
3852 Send an HTTP request to the peer.
3840
3853
3841 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3854 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3842
3855
3843 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3856 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3844 headers to add to the request. e.g. ``Accept: foo``.
3857 headers to add to the request. e.g. ``Accept: foo``.
3845
3858
3846 The following arguments are special:
3859 The following arguments are special:
3847
3860
3848 ``BODYFILE``
3861 ``BODYFILE``
3849 The content of the file defined as the value to this argument will be
3862 The content of the file defined as the value to this argument will be
3850 transferred verbatim as the HTTP request body.
3863 transferred verbatim as the HTTP request body.
3851
3864
3852 ``frame <type> <flags> <payload>``
3865 ``frame <type> <flags> <payload>``
3853 Send a unified protocol frame as part of the request body.
3866 Send a unified protocol frame as part of the request body.
3854
3867
3855 All frames will be collected and sent as the body to the HTTP
3868 All frames will be collected and sent as the body to the HTTP
3856 request.
3869 request.
3857
3870
3858 close
3871 close
3859 -----
3872 -----
3860
3873
3861 Close the connection to the server.
3874 Close the connection to the server.
3862
3875
3863 flush
3876 flush
3864 -----
3877 -----
3865
3878
3866 Flush data written to the server.
3879 Flush data written to the server.
3867
3880
3868 readavailable
3881 readavailable
3869 -------------
3882 -------------
3870
3883
3871 Close the write end of the connection and read all available data from
3884 Close the write end of the connection and read all available data from
3872 the server.
3885 the server.
3873
3886
3874 If the connection to the server encompasses multiple pipes, we poll both
3887 If the connection to the server encompasses multiple pipes, we poll both
3875 pipes and read available data.
3888 pipes and read available data.
3876
3889
3877 readline
3890 readline
3878 --------
3891 --------
3879
3892
3880 Read a line of output from the server. If there are multiple output
3893 Read a line of output from the server. If there are multiple output
3881 pipes, reads only the main pipe.
3894 pipes, reads only the main pipe.
3882
3895
3883 ereadline
3896 ereadline
3884 ---------
3897 ---------
3885
3898
3886 Like ``readline``, but read from the stderr pipe, if available.
3899 Like ``readline``, but read from the stderr pipe, if available.
3887
3900
3888 read <X>
3901 read <X>
3889 --------
3902 --------
3890
3903
3891 ``read()`` ``<X>`` bytes from the server's main output pipe.
3904 ``read()`` ``<X>`` bytes from the server's main output pipe.
3892
3905
3893 eread <X>
3906 eread <X>
3894 ---------
3907 ---------
3895
3908
3896 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
3909 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
3897
3910
3898 Specifying Unified Frame-Based Protocol Frames
3911 Specifying Unified Frame-Based Protocol Frames
3899 ----------------------------------------------
3912 ----------------------------------------------
3900
3913
3901 It is possible to emit *Unified Frame-Based Protocol* frames by using
3914 It is possible to emit *Unified Frame-Based Protocol* frames by using
3902 special syntax.
3915 special syntax.
3903
3916
3904 A frame is composed of a type, flags, and a payload. These can be parsed
3917 A frame is composed of a type, flags, and a payload. These can be parsed
3905 from a string of the form:
3918 from a string of the form:
3906
3919
3907 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3920 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3908
3921
3909 ``request-id`` and ``stream-id`` are integers defining the request and
3922 ``request-id`` and ``stream-id`` are integers defining the request and
3910 stream identifiers.
3923 stream identifiers.
3911
3924
3912 ``type`` can be an integer value for the frame type or the string name
3925 ``type`` can be an integer value for the frame type or the string name
3913 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3926 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3914 ``command-name``.
3927 ``command-name``.
3915
3928
3916 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3929 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3917 components. Each component (and there can be just one) can be an integer
3930 components. Each component (and there can be just one) can be an integer
3918 or a flag name for stream flags or frame flags, respectively. Values are
3931 or a flag name for stream flags or frame flags, respectively. Values are
3919 resolved to integers and then bitwise OR'd together.
3932 resolved to integers and then bitwise OR'd together.
3920
3933
3921 ``payload`` represents the raw frame payload. If it begins with
3934 ``payload`` represents the raw frame payload. If it begins with
3922 ``cbor:``, the following string is evaluated as Python code and the
3935 ``cbor:``, the following string is evaluated as Python code and the
3923 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3936 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3924 as a Python byte string literal.
3937 as a Python byte string literal.
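For example, assuming the frame type and flag names used here exist in
``wireprotoframing.py``, a frame string could look like this sketch::

    1 1 stream-begin command-request new cbor:{b'name': b'heads'}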
3925 """
3938 """
3926 opts = pycompat.byteskwargs(opts)
3939 opts = pycompat.byteskwargs(opts)
3927
3940
3928 if opts[b'localssh'] and not repo:
3941 if opts[b'localssh'] and not repo:
3929 raise error.Abort(_(b'--localssh requires a repository'))
3942 raise error.Abort(_(b'--localssh requires a repository'))
3930
3943
3931 if opts[b'peer'] and opts[b'peer'] not in (
3944 if opts[b'peer'] and opts[b'peer'] not in (
3932 b'raw',
3945 b'raw',
3933 b'http2',
3946 b'http2',
3934 b'ssh1',
3947 b'ssh1',
3935 b'ssh2',
3948 b'ssh2',
3936 ):
3949 ):
3937 raise error.Abort(
3950 raise error.Abort(
3938 _(b'invalid value for --peer'),
3951 _(b'invalid value for --peer'),
3939 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
3952 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
3940 )
3953 )
3941
3954
3942 if path and opts[b'localssh']:
3955 if path and opts[b'localssh']:
3943 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3956 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3944
3957
3945 if ui.interactive():
3958 if ui.interactive():
3946 ui.write(_(b'(waiting for commands on stdin)\n'))
3959 ui.write(_(b'(waiting for commands on stdin)\n'))
3947
3960
3948 blocks = list(_parsewirelangblocks(ui.fin))
3961 blocks = list(_parsewirelangblocks(ui.fin))
3949
3962
3950 proc = None
3963 proc = None
3951 stdin = None
3964 stdin = None
3952 stdout = None
3965 stdout = None
3953 stderr = None
3966 stderr = None
3954 opener = None
3967 opener = None
3955
3968
3956 if opts[b'localssh']:
3969 if opts[b'localssh']:
3957 # We start the SSH server in its own process so there is process
3970 # We start the SSH server in its own process so there is process
3958 # separation. This prevents a whole class of potential bugs around
3971 # separation. This prevents a whole class of potential bugs around
3959 # shared state from interfering with server operation.
3972 # shared state from interfering with server operation.
3960 args = procutil.hgcmd() + [
3973 args = procutil.hgcmd() + [
3961 b'-R',
3974 b'-R',
3962 repo.root,
3975 repo.root,
3963 b'debugserve',
3976 b'debugserve',
3964 b'--sshstdio',
3977 b'--sshstdio',
3965 ]
3978 ]
3966 proc = subprocess.Popen(
3979 proc = subprocess.Popen(
3967 pycompat.rapply(procutil.tonativestr, args),
3980 pycompat.rapply(procutil.tonativestr, args),
3968 stdin=subprocess.PIPE,
3981 stdin=subprocess.PIPE,
3969 stdout=subprocess.PIPE,
3982 stdout=subprocess.PIPE,
3970 stderr=subprocess.PIPE,
3983 stderr=subprocess.PIPE,
3971 bufsize=0,
3984 bufsize=0,
3972 )
3985 )
3973
3986
3974 stdin = proc.stdin
3987 stdin = proc.stdin
3975 stdout = proc.stdout
3988 stdout = proc.stdout
3976 stderr = proc.stderr
3989 stderr = proc.stderr
3977
3990
3978 # We turn the pipes into observers so we can log I/O.
3991 # We turn the pipes into observers so we can log I/O.
3979 if ui.verbose or opts[b'peer'] == b'raw':
3992 if ui.verbose or opts[b'peer'] == b'raw':
3980 stdin = util.makeloggingfileobject(
3993 stdin = util.makeloggingfileobject(
3981 ui, proc.stdin, b'i', logdata=True
3994 ui, proc.stdin, b'i', logdata=True
3982 )
3995 )
3983 stdout = util.makeloggingfileobject(
3996 stdout = util.makeloggingfileobject(
3984 ui, proc.stdout, b'o', logdata=True
3997 ui, proc.stdout, b'o', logdata=True
3985 )
3998 )
3986 stderr = util.makeloggingfileobject(
3999 stderr = util.makeloggingfileobject(
3987 ui, proc.stderr, b'e', logdata=True
4000 ui, proc.stderr, b'e', logdata=True
3988 )
4001 )
3989
4002
3990 # --localssh also implies the peer connection settings.
4003 # --localssh also implies the peer connection settings.
3991
4004
3992 url = b'ssh://localserver'
4005 url = b'ssh://localserver'
3993 autoreadstderr = not opts[b'noreadstderr']
4006 autoreadstderr = not opts[b'noreadstderr']
3994
4007
3995 if opts[b'peer'] == b'ssh1':
4008 if opts[b'peer'] == b'ssh1':
3996 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4009 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
3997 peer = sshpeer.sshv1peer(
4010 peer = sshpeer.sshv1peer(
3998 ui,
4011 ui,
3999 url,
4012 url,
4000 proc,
4013 proc,
4001 stdin,
4014 stdin,
4002 stdout,
4015 stdout,
4003 stderr,
4016 stderr,
4004 None,
4017 None,
4005 autoreadstderr=autoreadstderr,
4018 autoreadstderr=autoreadstderr,
4006 )
4019 )
4007 elif opts[b'peer'] == b'ssh2':
4020 elif opts[b'peer'] == b'ssh2':
4008 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4021 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4009 peer = sshpeer.sshv2peer(
4022 peer = sshpeer.sshv2peer(
4010 ui,
4023 ui,
4011 url,
4024 url,
4012 proc,
4025 proc,
4013 stdin,
4026 stdin,
4014 stdout,
4027 stdout,
4015 stderr,
4028 stderr,
4016 None,
4029 None,
4017 autoreadstderr=autoreadstderr,
4030 autoreadstderr=autoreadstderr,
4018 )
4031 )
4019 elif opts[b'peer'] == b'raw':
4032 elif opts[b'peer'] == b'raw':
4020 ui.write(_(b'using raw connection to peer\n'))
4033 ui.write(_(b'using raw connection to peer\n'))
4021 peer = None
4034 peer = None
4022 else:
4035 else:
4023 ui.write(_(b'creating ssh peer from handshake results\n'))
4036 ui.write(_(b'creating ssh peer from handshake results\n'))
4024 peer = sshpeer.makepeer(
4037 peer = sshpeer.makepeer(
4025 ui,
4038 ui,
4026 url,
4039 url,
4027 proc,
4040 proc,
4028 stdin,
4041 stdin,
4029 stdout,
4042 stdout,
4030 stderr,
4043 stderr,
4031 autoreadstderr=autoreadstderr,
4044 autoreadstderr=autoreadstderr,
4032 )
4045 )
4033
4046
4034 elif path:
4047 elif path:
4035 # We bypass hg.peer() so we can proxy the sockets.
4048 # We bypass hg.peer() so we can proxy the sockets.
4036 # TODO consider not doing this because we skip
4049 # TODO consider not doing this because we skip
4037 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4050 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4038 u = util.url(path)
4051 u = util.url(path)
4039 if u.scheme != b'http':
4052 if u.scheme != b'http':
4040 raise error.Abort(_(b'only http:// paths are currently supported'))
4053 raise error.Abort(_(b'only http:// paths are currently supported'))
4041
4054
4042 url, authinfo = u.authinfo()
4055 url, authinfo = u.authinfo()
4043 openerargs = {
4056 openerargs = {
4044 'useragent': b'Mercurial debugwireproto',
4057 'useragent': b'Mercurial debugwireproto',
4045 }
4058 }
4046
4059
4047 # Turn pipes/sockets into observers so we can log I/O.
4060 # Turn pipes/sockets into observers so we can log I/O.
4048 if ui.verbose:
4061 if ui.verbose:
4049 openerargs.update(
4062 openerargs.update(
4050 {
4063 {
4051 'loggingfh': ui,
4064 'loggingfh': ui,
4052 'loggingname': b's',
4065 'loggingname': b's',
4053 'loggingopts': {'logdata': True, 'logdataapis': False,},
4066 'loggingopts': {'logdata': True, 'logdataapis': False,},
4054 }
4067 }
4055 )
4068 )
4056
4069
4057 if ui.debugflag:
4070 if ui.debugflag:
4058 openerargs['loggingopts']['logdataapis'] = True
4071 openerargs['loggingopts']['logdataapis'] = True
4059
4072
4060 # Don't send default headers when in raw mode. This allows us to
4073 # Don't send default headers when in raw mode. This allows us to
4061 # bypass most of the behavior of our URL handling code so we can
4074 # bypass most of the behavior of our URL handling code so we can
4062 # have near complete control over what's sent on the wire.
4075 # have near complete control over what's sent on the wire.
4063 if opts[b'peer'] == b'raw':
4076 if opts[b'peer'] == b'raw':
4064 openerargs['sendaccept'] = False
4077 openerargs['sendaccept'] = False
4065
4078
4066 opener = urlmod.opener(ui, authinfo, **openerargs)
4079 opener = urlmod.opener(ui, authinfo, **openerargs)
4067
4080
4068 if opts[b'peer'] == b'http2':
4081 if opts[b'peer'] == b'http2':
4069 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4082 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4070 # We go through makepeer() because we need an API descriptor for
4083 # We go through makepeer() because we need an API descriptor for
4071 # the peer instance to be useful.
4084 # the peer instance to be useful.
4072 with ui.configoverride(
4085 with ui.configoverride(
4073 {(b'experimental', b'httppeer.advertise-v2'): True}
4086 {(b'experimental', b'httppeer.advertise-v2'): True}
4074 ):
4087 ):
4075 if opts[b'nologhandshake']:
4088 if opts[b'nologhandshake']:
4076 ui.pushbuffer()
4089 ui.pushbuffer()
4077
4090
4078 peer = httppeer.makepeer(ui, path, opener=opener)
4091 peer = httppeer.makepeer(ui, path, opener=opener)
4079
4092
4080 if opts[b'nologhandshake']:
4093 if opts[b'nologhandshake']:
4081 ui.popbuffer()
4094 ui.popbuffer()
4082
4095
4083 if not isinstance(peer, httppeer.httpv2peer):
4096 if not isinstance(peer, httppeer.httpv2peer):
4084 raise error.Abort(
4097 raise error.Abort(
4085 _(
4098 _(
4086 b'could not instantiate HTTP peer for '
4099 b'could not instantiate HTTP peer for '
4087 b'wire protocol version 2'
4100 b'wire protocol version 2'
4088 ),
4101 ),
4089 hint=_(
4102 hint=_(
4090 b'the server may not have the feature '
4103 b'the server may not have the feature '
4091 b'enabled or is not allowing this '
4104 b'enabled or is not allowing this '
4092 b'client version'
4105 b'client version'
4093 ),
4106 ),
4094 )
4107 )
4095
4108
4096 elif opts[b'peer'] == b'raw':
4109 elif opts[b'peer'] == b'raw':
4097 ui.write(_(b'using raw connection to peer\n'))
4110 ui.write(_(b'using raw connection to peer\n'))
4098 peer = None
4111 peer = None
4099 elif opts[b'peer']:
4112 elif opts[b'peer']:
4100 raise error.Abort(
4113 raise error.Abort(
4101 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4114 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4102 )
4115 )
4103 else:
4116 else:
4104 peer = httppeer.makepeer(ui, path, opener=opener)
4117 peer = httppeer.makepeer(ui, path, opener=opener)
4105
4118
4106 # We /could/ populate stdin/stdout with sock.makefile()...
4119 # We /could/ populate stdin/stdout with sock.makefile()...
4107 else:
4120 else:
4108 raise error.Abort(_(b'unsupported connection configuration'))
4121 raise error.Abort(_(b'unsupported connection configuration'))
4109
4122
4110 batchedcommands = None
4123 batchedcommands = None
4111
4124
4112 # Now perform actions based on the parsed wire language instructions.
4125 # Now perform actions based on the parsed wire language instructions.
4113 for action, lines in blocks:
4126 for action, lines in blocks:
4114 if action in (b'raw', b'raw+'):
4127 if action in (b'raw', b'raw+'):
4115 if not stdin:
4128 if not stdin:
4116 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4129 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4117
4130
4118 # Concatenate the data together.
4131 # Concatenate the data together.
4119 data = b''.join(l.lstrip() for l in lines)
4132 data = b''.join(l.lstrip() for l in lines)
4120 data = stringutil.unescapestr(data)
4133 data = stringutil.unescapestr(data)
4121 stdin.write(data)
4134 stdin.write(data)
4122
4135
4123 if action == b'raw+':
4136 if action == b'raw+':
4124 stdin.flush()
4137 stdin.flush()
4125 elif action == b'flush':
4138 elif action == b'flush':
4126 if not stdin:
4139 if not stdin:
4127 raise error.Abort(_(b'cannot call flush on this peer'))
4140 raise error.Abort(_(b'cannot call flush on this peer'))
4128 stdin.flush()
4141 stdin.flush()
4129 elif action.startswith(b'command'):
4142 elif action.startswith(b'command'):
4130 if not peer:
4143 if not peer:
4131 raise error.Abort(
4144 raise error.Abort(
4132 _(
4145 _(
4133 b'cannot send commands unless peer instance '
4146 b'cannot send commands unless peer instance '
4134 b'is available'
4147 b'is available'
4135 )
4148 )
4136 )
4149 )
4137
4150
4138 command = action.split(b' ', 1)[1]
4151 command = action.split(b' ', 1)[1]
4139
4152
4140 args = {}
4153 args = {}
4141 for line in lines:
4154 for line in lines:
4142 # We need to allow empty values.
4155 # We need to allow empty values.
4143 fields = line.lstrip().split(b' ', 1)
4156 fields = line.lstrip().split(b' ', 1)
4144 if len(fields) == 1:
4157 if len(fields) == 1:
4145 key = fields[0]
4158 key = fields[0]
4146 value = b''
4159 value = b''
4147 else:
4160 else:
4148 key, value = fields
4161 key, value = fields
4149
4162
4150 if value.startswith(b'eval:'):
4163 if value.startswith(b'eval:'):
4151 value = stringutil.evalpythonliteral(value[5:])
4164 value = stringutil.evalpythonliteral(value[5:])
4152 else:
4165 else:
4153 value = stringutil.unescapestr(value)
4166 value = stringutil.unescapestr(value)
4154
4167
4155 args[key] = value
4168 args[key] = value
4156
4169
4157 if batchedcommands is not None:
4170 if batchedcommands is not None:
4158 batchedcommands.append((command, args))
4171 batchedcommands.append((command, args))
4159 continue
4172 continue
4160
4173
4161 ui.status(_(b'sending %s command\n') % command)
4174 ui.status(_(b'sending %s command\n') % command)
4162
4175
4163 if b'PUSHFILE' in args:
4176 if b'PUSHFILE' in args:
4164 with open(args[b'PUSHFILE'], 'rb') as fh:
4177 with open(args[b'PUSHFILE'], 'rb') as fh:
4165 del args[b'PUSHFILE']
4178 del args[b'PUSHFILE']
4166 res, output = peer._callpush(
4179 res, output = peer._callpush(
4167 command, fh, **pycompat.strkwargs(args)
4180 command, fh, **pycompat.strkwargs(args)
4168 )
4181 )
4169 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4182 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4170 ui.status(
4183 ui.status(
4171 _(b'remote output: %s\n') % stringutil.escapestr(output)
4184 _(b'remote output: %s\n') % stringutil.escapestr(output)
4172 )
4185 )
4173 else:
4186 else:
4174 with peer.commandexecutor() as e:
4187 with peer.commandexecutor() as e:
4175 res = e.callcommand(command, args).result()
4188 res = e.callcommand(command, args).result()
4176
4189
4177 if isinstance(res, wireprotov2peer.commandresponse):
4190 if isinstance(res, wireprotov2peer.commandresponse):
4178 val = res.objects()
4191 val = res.objects()
4179 ui.status(
4192 ui.status(
4180 _(b'response: %s\n')
4193 _(b'response: %s\n')
4181 % stringutil.pprint(val, bprefix=True, indent=2)
4194 % stringutil.pprint(val, bprefix=True, indent=2)
4182 )
4195 )
4183 else:
4196 else:
4184 ui.status(
4197 ui.status(
4185 _(b'response: %s\n')
4198 _(b'response: %s\n')
4186 % stringutil.pprint(res, bprefix=True, indent=2)
4199 % stringutil.pprint(res, bprefix=True, indent=2)
4187 )
4200 )
4188
4201
4189 elif action == b'batchbegin':
4202 elif action == b'batchbegin':
4190 if batchedcommands is not None:
4203 if batchedcommands is not None:
4191 raise error.Abort(_(b'nested batchbegin not allowed'))
4204 raise error.Abort(_(b'nested batchbegin not allowed'))
4192
4205
4193 batchedcommands = []
4206 batchedcommands = []
4194 elif action == b'batchsubmit':
4207 elif action == b'batchsubmit':
4195 # There is a batching API we could go through. But it would be
4208 # There is a batching API we could go through. But it would be
4196 # difficult to normalize requests into function calls. It is easier
4209 # difficult to normalize requests into function calls. It is easier
4197 # to bypass this layer and normalize to commands + args.
4210 # to bypass this layer and normalize to commands + args.
4198 ui.status(
4211 ui.status(
4199 _(b'sending batch with %d sub-commands\n')
4212 _(b'sending batch with %d sub-commands\n')
4200 % len(batchedcommands)
4213 % len(batchedcommands)
4201 )
4214 )
4202 assert peer is not None
4215 assert peer is not None
4203 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4216 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4204 ui.status(
4217 ui.status(
4205 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4218 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4206 )
4219 )
4207
4220
4208 batchedcommands = None
4221 batchedcommands = None
4209
4222
4210 elif action.startswith(b'httprequest '):
4223 elif action.startswith(b'httprequest '):
4211 if not opener:
4224 if not opener:
4212 raise error.Abort(
4225 raise error.Abort(
4213 _(b'cannot use httprequest without an HTTP peer')
4226 _(b'cannot use httprequest without an HTTP peer')
4214 )
4227 )
4215
4228
4216 request = action.split(b' ', 2)
4229 request = action.split(b' ', 2)
4217 if len(request) != 3:
4230 if len(request) != 3:
4218 raise error.Abort(
4231 raise error.Abort(
4219 _(
4232 _(
4220 b'invalid httprequest: expected format is '
4233 b'invalid httprequest: expected format is '
4221 b'"httprequest <method> <path>"'
4234 b'"httprequest <method> <path>"'
4222 )
4235 )
4223 )
4236 )
4224
4237
4225 method, httppath = request[1:]
4238 method, httppath = request[1:]
4226 headers = {}
4239 headers = {}
4227 body = None
4240 body = None
4228 frames = []
4241 frames = []
4229 for line in lines:
4242 for line in lines:
4230 line = line.lstrip()
4243 line = line.lstrip()
4231 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4244 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4232 if m:
4245 if m:
4233 # Headers need to use native strings.
4246 # Headers need to use native strings.
4234 key = pycompat.strurl(m.group(1))
4247 key = pycompat.strurl(m.group(1))
4235 value = pycompat.strurl(m.group(2))
4248 value = pycompat.strurl(m.group(2))
4236 headers[key] = value
4249 headers[key] = value
4237 continue
4250 continue
4238
4251
4239 if line.startswith(b'BODYFILE '):
4252 if line.startswith(b'BODYFILE '):
4240 with open(line.split(b' ', 1)[1], b'rb') as fh:
4253 with open(line.split(b' ', 1)[1], b'rb') as fh:
4241 body = fh.read()
4254 body = fh.read()
4242 elif line.startswith(b'frame '):
4255 elif line.startswith(b'frame '):
4243 frame = wireprotoframing.makeframefromhumanstring(
4256 frame = wireprotoframing.makeframefromhumanstring(
4244 line[len(b'frame ') :]
4257 line[len(b'frame ') :]
4245 )
4258 )
4246
4259
4247 frames.append(frame)
4260 frames.append(frame)
4248 else:
4261 else:
4249 raise error.Abort(
4262 raise error.Abort(
4250 _(b'unknown argument to httprequest: %s') % line
4263 _(b'unknown argument to httprequest: %s') % line
4251 )
4264 )
4252
4265
4253 url = path + httppath
4266 url = path + httppath
4254
4267
4255 if frames:
4268 if frames:
4256 body = b''.join(bytes(f) for f in frames)
4269 body = b''.join(bytes(f) for f in frames)
4257
4270
4258 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4271 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4259
4272
4260 # urllib.Request insists on using has_data() as a proxy for
4273 # urllib.Request insists on using has_data() as a proxy for
4261 # determining the request method. Override that to use our
4274 # determining the request method. Override that to use our
4262 # explicitly requested method.
4275 # explicitly requested method.
4263 req.get_method = lambda: pycompat.sysstr(method)
4276 req.get_method = lambda: pycompat.sysstr(method)
4264
4277
4265 try:
4278 try:
4266 res = opener.open(req)
4279 res = opener.open(req)
4267 body = res.read()
4280 body = res.read()
4268 except util.urlerr.urlerror as e:
4281 except util.urlerr.urlerror as e:
4269 # read() method must be called, but only exists in Python 2
4282 # read() method must be called, but only exists in Python 2
4270 getattr(e, 'read', lambda: None)()
4283 getattr(e, 'read', lambda: None)()
4271 continue
4284 continue
4272
4285
4273 ct = res.headers.get('Content-Type')
4286 ct = res.headers.get('Content-Type')
4274 if ct == 'application/mercurial-cbor':
4287 if ct == 'application/mercurial-cbor':
4275 ui.write(
4288 ui.write(
4276 _(b'cbor> %s\n')
4289 _(b'cbor> %s\n')
4277 % stringutil.pprint(
4290 % stringutil.pprint(
4278 cborutil.decodeall(body), bprefix=True, indent=2
4291 cborutil.decodeall(body), bprefix=True, indent=2
4279 )
4292 )
4280 )
4293 )
4281
4294
4282 elif action == b'close':
4295 elif action == b'close':
4283 assert peer is not None
4296 assert peer is not None
4284 peer.close()
4297 peer.close()
4285 elif action == b'readavailable':
4298 elif action == b'readavailable':
4286 if not stdout or not stderr:
4299 if not stdout or not stderr:
4287 raise error.Abort(
4300 raise error.Abort(
4288 _(b'readavailable not available on this peer')
4301 _(b'readavailable not available on this peer')
4289 )
4302 )
4290
4303
4291 stdin.close()
4304 stdin.close()
4292 stdout.read()
4305 stdout.read()
4293 stderr.read()
4306 stderr.read()
4294
4307
4295 elif action == b'readline':
4308 elif action == b'readline':
4296 if not stdout:
4309 if not stdout:
4297 raise error.Abort(_(b'readline not available on this peer'))
4310 raise error.Abort(_(b'readline not available on this peer'))
4298 stdout.readline()
4311 stdout.readline()
4299 elif action == b'ereadline':
4312 elif action == b'ereadline':
4300 if not stderr:
4313 if not stderr:
4301 raise error.Abort(_(b'ereadline not available on this peer'))
4314 raise error.Abort(_(b'ereadline not available on this peer'))
4302 stderr.readline()
4315 stderr.readline()
4303 elif action.startswith(b'read '):
4316 elif action.startswith(b'read '):
4304 count = int(action.split(b' ', 1)[1])
4317 count = int(action.split(b' ', 1)[1])
4305 if not stdout:
4318 if not stdout:
4306 raise error.Abort(_(b'read not available on this peer'))
4319 raise error.Abort(_(b'read not available on this peer'))
4307 stdout.read(count)
4320 stdout.read(count)
4308 elif action.startswith(b'eread '):
4321 elif action.startswith(b'eread '):
4309 count = int(action.split(b' ', 1)[1])
4322 count = int(action.split(b' ', 1)[1])
4310 if not stderr:
4323 if not stderr:
4311 raise error.Abort(_(b'eread not available on this peer'))
4324 raise error.Abort(_(b'eread not available on this peer'))
4312 stderr.read(count)
4325 stderr.read(count)
4313 else:
4326 else:
4314 raise error.Abort(_(b'unknown action: %s') % action)
4327 raise error.Abort(_(b'unknown action: %s') % action)
4315
4328
4316 if batchedcommands is not None:
4329 if batchedcommands is not None:
4317 raise error.Abort(_(b'unclosed "batchbegin" request'))
4330 raise error.Abort(_(b'unclosed "batchbegin" request'))
4318
4331
4319 if peer:
4332 if peer:
4320 peer.close()
4333 peer.close()
4321
4334
4322 if proc:
4335 if proc:
4323 proc.kill()
4336 proc.kill()
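As an illustrative sketch of how the machinery above can be driven end to
end (run from inside a repository, since ``--localssh`` requires one; the
action script is an example only):

    $ hg debugwireproto --localssh --peer ssh1 << EOF
    > command heads
    > EOF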
@@ -1,195 +1,202 b''
1 # nodemap.py - nodemap related code and utilities
1 # nodemap.py - nodemap related code and utilities
2 #
2 #
3 # Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net>
3 # Copyright 2019 Pierre-Yves David <pierre-yves.david@octobus.net>
4 # Copyright 2019 George Racinet <georges.racinet@octobus.net>
4 # Copyright 2019 George Racinet <georges.racinet@octobus.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import struct
11 import struct
12
12
13 from .. import (
13 from .. import (
14 error,
14 error,
15 node as nodemod,
15 node as nodemod,
16 pycompat,
16 pycompat,
17 )
17 )
18
18
19
19
20 class NodeMap(dict):
20 class NodeMap(dict):
21 def __missing__(self, x):
21 def __missing__(self, x):
22 raise error.RevlogError(b'unknown node: %s' % x)
22 raise error.RevlogError(b'unknown node: %s' % x)
23
23
24
24
25 def persisted_data(revlog):
26 """read the nodemap for a revlog from disk"""
27 if revlog.nodemap_file is None:
28 return None
29 return revlog.opener.tryread(revlog.nodemap_file)
30
31
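# A hypothetical caller-side sketch (not part of this hunk): a debug command
# could dump the raw on-disk nodemap data roughly like this, assuming ``ui``
# and a revlog ``r`` with a configured nodemap_file are in scope:
#
#     data = persisted_data(r)
#     if data is not None:
#         ui.write(data)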
25 def setup_persistent_nodemap(tr, revlog):
32 def setup_persistent_nodemap(tr, revlog):
26 """Install whatever is needed on the transaction side to persist a nodemap on disk
33 """Install whatever is needed on the transaction side to persist a nodemap on disk
27
34
28 (only actually persist the nodemap if this is relevant for this revlog)
35 (only actually persist the nodemap if this is relevant for this revlog)
29 """
36 """
30 if revlog.nodemap_file is None:
37 if revlog.nodemap_file is None:
31 return # we do not use persistent_nodemap on this revlog
38 return # we do not use persistent_nodemap on this revlog
32 callback_id = b"revlog-persistent-nodemap-%s" % revlog.nodemap_file
39 callback_id = b"revlog-persistent-nodemap-%s" % revlog.nodemap_file
33 if tr.hasfinalize(callback_id):
40 if tr.hasfinalize(callback_id):
34 return # no need to register again
41 return # no need to register again
35 tr.addfinalize(callback_id, lambda tr: _persist_nodemap(tr, revlog))
42 tr.addfinalize(callback_id, lambda tr: _persist_nodemap(tr, revlog))
36
43
37
44
38 def _persist_nodemap(tr, revlog):
45 def _persist_nodemap(tr, revlog):
39 """Write nodemap data on disk for a given revlog
46 """Write nodemap data on disk for a given revlog
40 """
47 """
41 if getattr(revlog, 'filteredrevs', ()):
48 if getattr(revlog, 'filteredrevs', ()):
42 raise error.ProgrammingError(
49 raise error.ProgrammingError(
43 "cannot persist nodemap of a filtered changelog"
50 "cannot persist nodemap of a filtered changelog"
44 )
51 )
45 if revlog.nodemap_file is None:
52 if revlog.nodemap_file is None:
46 msg = "calling persist nodemap on a revlog without the feature enabled"
53 msg = "calling persist nodemap on a revlog without the feature enabled"
47 raise error.ProgrammingError(msg)
54 raise error.ProgrammingError(msg)
48 data = persistent_data(revlog.index)
55 data = persistent_data(revlog.index)
49 # EXP-TODO: if this is a cache, this should use a cache vfs, not a
56 # EXP-TODO: if this is a cache, this should use a cache vfs, not a
50 # store vfs
57 # store vfs
51 with revlog.opener(revlog.nodemap_file, b'w') as f:
58 with revlog.opener(revlog.nodemap_file, b'w') as f:
52 f.write(data)
59 f.write(data)
53 # EXP-TODO: if the transaction abort, we should remove the new data and
60 # EXP-TODO: if the transaction abort, we should remove the new data and
54 # reinstall the old one. (This will be simpler when the file format get a
61 # reinstall the old one. (This will be simpler when the file format get a
55 # bit more advanced)
62 # bit more advanced)
56
63
57
64
58 ### Nodemap Trie
65 ### Nodemap Trie
59 #
66 #
60 # This is a simple reference implementation to compute and persist a nodemap
67 # This is a simple reference implementation to compute and persist a nodemap
61 # trie. This reference implementation is write-only. The python version of this
68 # trie. This reference implementation is write-only. The python version of this
62 # is not expected to be actually used, since it won't provide a performance
69 # is not expected to be actually used, since it won't provide a performance
63 # improvement over the existing non-persistent C implementation.
70 # improvement over the existing non-persistent C implementation.
64 #
71 #
65 # The nodemap is persisted as a Trie using 4-bit addresses / 16-entry blocks. Each
72 # The nodemap is persisted as a Trie using 4-bit addresses / 16-entry blocks. Each
66 # revision can be addressed using the shortest prefix of its node.
73 # revision can be addressed using the shortest prefix of its node.
67 #
74 #
68 # The trie is stored as a sequence of blocks. Each block contains 16 entries
75 # The trie is stored as a sequence of blocks. Each block contains 16 entries
69 # (signed 32bit integers, big endian). Each entry can be one of the following:
76 # (signed 32bit integers, big endian). Each entry can be one of the following:
70 #
77 #
71 # * value >= 0 -> index of sub-block
78 # * value >= 0 -> index of sub-block
72 # * value == -1 -> no value
79 # * value == -1 -> no value
73 # * value < -1 -> a revision value: rev = -(value+2)
80 # * value < -1 -> a revision value: rev = -(value+2)
74 #
81 #
75 # The implementation focuses on simplicity, not on performance. A Rust
82 # The implementation focuses on simplicity, not on performance. A Rust
76 # implementation should provide an efficient version of the same binary
83 # implementation should provide an efficient version of the same binary
77 # persistence. This reference python implementation is never meant to be
84 # persistence. This reference python implementation is never meant to be
78 # extensively used in production.
85 # extensively used in production.
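#
# As a worked example of the encoding above (REV_OFFSET is 2 in the code
# below):
#
#   rev 0  -> stored as -(0 + 2)  = -2
#   rev 17 -> stored as -(17 + 2) = -19
#   reading back: -(-19 + 2) = 17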
79
86
80
87
81 def persistent_data(index):
88 def persistent_data(index):
82 """return the persistent binary form for a nodemap for a given index
89 """return the persistent binary form for a nodemap for a given index
83 """
90 """
84 trie = _build_trie(index)
91 trie = _build_trie(index)
85 return _persist_trie(trie)
92 return _persist_trie(trie)
86
93
87
94
88 S_BLOCK = struct.Struct(">" + ("l" * 16))
95 S_BLOCK = struct.Struct(">" + ("l" * 16))
89
96
90 NO_ENTRY = -1
97 NO_ENTRY = -1
91 # rev 0 needs to be -2 because 0 is used for blocks and -1 is a special value.
98 # rev 0 needs to be -2 because 0 is used for blocks and -1 is a special value.
92 REV_OFFSET = 2
99 REV_OFFSET = 2
93
100
94
101
95 def _transform_rev(rev):
102 def _transform_rev(rev):
96 """Return the number used to represent the rev in the tree.
103 """Return the number used to represent the rev in the tree.
97
104
98 (or retrieve a rev number from such representation)
105 (or retrieve a rev number from such representation)
99
106
100 Note that this is an involution: the function is its own inverse (i.e.
107 Note that this is an involution: the function is its own inverse (i.e.
101 applying it twice gives back the original rev).
108 applying it twice gives back the original rev).
102 """
109 """
103 return -(rev + REV_OFFSET)
110 return -(rev + REV_OFFSET)
104
111
105
112
106 def _to_int(hex_digit):
113 def _to_int(hex_digit):
107 """turn a hexadecimal digit into a proper integer"""
114 """turn a hexadecimal digit into a proper integer"""
108 return int(hex_digit, 16)
115 return int(hex_digit, 16)
109
116
110
117
111 def _build_trie(index):
118 def _build_trie(index):
112 """build a nodemap trie
119 """build a nodemap trie
113
120
114 The nodemap stores the revision number for each unique prefix.
121 The nodemap stores the revision number for each unique prefix.
115
122
116 Each block is a dictionary with keys in `[0, 15]`. Values are either
123 Each block is a dictionary with keys in `[0, 15]`. Values are either
117 another block or a revision number.
124 another block or a revision number.
118 """
125 """
119 root = {}
126 root = {}
120 for rev in range(len(index)):
127 for rev in range(len(index)):
121 hex = nodemod.hex(index[rev][7])
128 hex = nodemod.hex(index[rev][7])
122 _insert_into_block(index, 0, root, rev, hex)
129 _insert_into_block(index, 0, root, rev, hex)
123 return root
130 return root
124
131
125
132
126 def _insert_into_block(index, level, block, current_rev, current_hex):
133 def _insert_into_block(index, level, block, current_rev, current_hex):
127 """insert a new revision in a block
134 """insert a new revision in a block
128
135
129 index: the index we are adding a revision for
136 index: the index we are adding a revision for
130 level: the depth of the current block in the trie
137 level: the depth of the current block in the trie
131 block: the block currently being considered
138 block: the block currently being considered
132 current_rev: the revision number we are adding
139 current_rev: the revision number we are adding
133 current_hex: the hexadecimal representation of the node of that revision
140 current_hex: the hexadecimal representation of the node of that revision
134 """
141 """
135 hex_digit = _to_int(current_hex[level : level + 1])
142 hex_digit = _to_int(current_hex[level : level + 1])
136 entry = block.get(hex_digit)
143 entry = block.get(hex_digit)
137 if entry is None:
144 if entry is None:
138 # no entry, simply store the revision number
145 # no entry, simply store the revision number
139 block[hex_digit] = current_rev
146 block[hex_digit] = current_rev
140 elif isinstance(entry, dict):
147 elif isinstance(entry, dict):
141 # need to recurse to an underlying block
148 # need to recurse to an underlying block
142 _insert_into_block(index, level + 1, entry, current_rev, current_hex)
149 _insert_into_block(index, level + 1, entry, current_rev, current_hex)
143 else:
150 else:
144 # collision with a previously unique prefix, inserting new
151 # collision with a previously unique prefix, inserting new
145 # vertices to fit both entries.
152 # vertices to fit both entries.
146 other_hex = nodemod.hex(index[entry][7])
153 other_hex = nodemod.hex(index[entry][7])
147 other_rev = entry
154 other_rev = entry
148 new = {}
155 new = {}
149 block[hex_digit] = new
156 block[hex_digit] = new
150 _insert_into_block(index, level + 1, new, other_rev, other_hex)
157 _insert_into_block(index, level + 1, new, other_rev, other_hex)
151 _insert_into_block(index, level + 1, new, current_rev, current_hex)
158 _insert_into_block(index, level + 1, new, current_rev, current_hex)
152
159
153
160
154 def _persist_trie(root):
161 def _persist_trie(root):
155 """turn a nodemap trie into persistent binary data
162 """turn a nodemap trie into persistent binary data
156
163
157 See `_build_trie` for nodemap trie structure"""
164 See `_build_trie` for nodemap trie structure"""
158 block_map = {}
165 block_map = {}
159 chunks = []
166 chunks = []
160 for tn in _walk_trie(root):
167 for tn in _walk_trie(root):
161 block_map[id(tn)] = len(chunks)
168 block_map[id(tn)] = len(chunks)
162 chunks.append(_persist_block(tn, block_map))
169 chunks.append(_persist_block(tn, block_map))
163 return b''.join(chunks)
170 return b''.join(chunks)
164
171
165
172
166 def _walk_trie(block):
173 def _walk_trie(block):
167 """yield all the blocks in a trie
174 """yield all the blocks in a trie
168
175
169 Children blocks are always yielded before their parent block.
176 Children blocks are always yielded before their parent block.
170 """
177 """
171 for (_, item) in sorted(block.items()):
178 for (_, item) in sorted(block.items()):
172 if isinstance(item, dict):
179 if isinstance(item, dict):
173 for sub_block in _walk_trie(item):
180 for sub_block in _walk_trie(item):
174 yield sub_block
181 yield sub_block
175 yield block
182 yield block
176
183
177
184
178 def _persist_block(block_node, block_map):
185 def _persist_block(block_node, block_map):
179 """produce persistent binary data for a single block
186 """produce persistent binary data for a single block
180
187
181 Children blocks are assumed to be already persisted and present in
188 Children blocks are assumed to be already persisted and present in
182 block_map.
189 block_map.
183 """
190 """
184 data = tuple(_to_value(block_node.get(i), block_map) for i in range(16))
191 data = tuple(_to_value(block_node.get(i), block_map) for i in range(16))
185 return S_BLOCK.pack(*data)
192 return S_BLOCK.pack(*data)
186
193
187
194
188 def _to_value(item, block_map):
195 def _to_value(item, block_map):
189 """persist any value as an integer"""
196 """persist any value as an integer"""
190 if item is None:
197 if item is None:
191 return NO_ENTRY
198 return NO_ENTRY
192 elif isinstance(item, dict):
199 elif isinstance(item, dict):
193 return block_map[id(item)]
200 return block_map[id(item)]
194 else:
201 else:
195 return _transform_rev(item)
202 return _transform_rev(item)
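As a minimal, hypothetical sketch (not part of this changeset) of the reading
side, the bytes produced by persistent_data() could be parsed back into the
nested-dict trie built by _build_trie(), relying only on the format described
above (64-byte blocks of 16 big-endian signed 32-bit entries, children written
before their parent, root block last):

    import struct

    S_BLOCK = struct.Struct(">" + ("l" * 16))
    NO_ENTRY = -1
    REV_OFFSET = 2


    def parse_data(data):
        """rebuild a nodemap trie (nested dicts) from its persisted bytes"""
        if len(data) % S_BLOCK.size != 0:
            raise ValueError("nodemap data is not a whole number of blocks")
        blocks = []
        for offset in range(0, len(data), S_BLOCK.size):
            block = {}
            for idx, value in enumerate(S_BLOCK.unpack_from(data, offset)):
                if value == NO_ENTRY:
                    continue
                elif value >= 0:
                    # sub-blocks are always persisted before their parent
                    block[idx] = blocks[value]
                else:
                    # revisions are stored as -(rev + REV_OFFSET)
                    block[idx] = -(value + REV_OFFSET)
            blocks.append(block)
        return blocks[-1] if blocks else {}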
@@ -1,429 +1,429 b''
1 Show all commands except debug commands
1 Show all commands except debug commands
2 $ hg debugcomplete
2 $ hg debugcomplete
3 abort
3 abort
4 add
4 add
5 addremove
5 addremove
6 annotate
6 annotate
7 archive
7 archive
8 backout
8 backout
9 bisect
9 bisect
10 bookmarks
10 bookmarks
11 branch
11 branch
12 branches
12 branches
13 bundle
13 bundle
14 cat
14 cat
15 clone
15 clone
16 commit
16 commit
17 config
17 config
18 continue
18 continue
19 copy
19 copy
20 diff
20 diff
21 export
21 export
22 files
22 files
23 forget
23 forget
24 graft
24 graft
25 grep
25 grep
26 heads
26 heads
27 help
27 help
28 identify
28 identify
29 import
29 import
30 incoming
30 incoming
31 init
31 init
32 locate
32 locate
33 log
33 log
34 manifest
34 manifest
35 merge
35 merge
36 outgoing
36 outgoing
37 parents
37 parents
38 paths
38 paths
39 phase
39 phase
40 pull
40 pull
41 push
41 push
42 recover
42 recover
43 remove
43 remove
44 rename
44 rename
45 resolve
45 resolve
46 revert
46 revert
47 rollback
47 rollback
48 root
48 root
49 serve
49 serve
50 shelve
50 shelve
51 status
51 status
52 summary
52 summary
53 tag
53 tag
54 tags
54 tags
55 tip
55 tip
56 unbundle
56 unbundle
57 unshelve
57 unshelve
58 update
58 update
59 verify
59 verify
60 version
60 version
61
61
62 Show all commands that start with "a"
62 Show all commands that start with "a"
63 $ hg debugcomplete a
63 $ hg debugcomplete a
64 abort
64 abort
65 add
65 add
66 addremove
66 addremove
67 annotate
67 annotate
68 archive
68 archive
69
69
70 Do not show debug commands if there are other candidates
70 Do not show debug commands if there are other candidates
71 $ hg debugcomplete d
71 $ hg debugcomplete d
72 diff
72 diff
73
73
74 Show debug commands if there are no other candidates
74 Show debug commands if there are no other candidates
75 $ hg debugcomplete debug
75 $ hg debugcomplete debug
76 debugancestor
76 debugancestor
77 debugapplystreamclonebundle
77 debugapplystreamclonebundle
78 debugbuilddag
78 debugbuilddag
79 debugbundle
79 debugbundle
80 debugcapabilities
80 debugcapabilities
81 debugcheckstate
81 debugcheckstate
82 debugcolor
82 debugcolor
83 debugcommands
83 debugcommands
84 debugcomplete
84 debugcomplete
85 debugconfig
85 debugconfig
86 debugcreatestreamclonebundle
86 debugcreatestreamclonebundle
87 debugdag
87 debugdag
88 debugdata
88 debugdata
89 debugdate
89 debugdate
90 debugdeltachain
90 debugdeltachain
91 debugdirstate
91 debugdirstate
92 debugdiscovery
92 debugdiscovery
93 debugdownload
93 debugdownload
94 debugextensions
94 debugextensions
95 debugfileset
95 debugfileset
96 debugformat
96 debugformat
97 debugfsinfo
97 debugfsinfo
98 debuggetbundle
98 debuggetbundle
99 debugignore
99 debugignore
100 debugindex
100 debugindex
101 debugindexdot
101 debugindexdot
102 debugindexstats
102 debugindexstats
103 debuginstall
103 debuginstall
104 debugknown
104 debugknown
105 debuglabelcomplete
105 debuglabelcomplete
106 debuglocks
106 debuglocks
107 debugmanifestfulltextcache
107 debugmanifestfulltextcache
108 debugmergestate
108 debugmergestate
109 debugnamecomplete
109 debugnamecomplete
110 debugnodemap
110 debugnodemap
111 debugobsolete
111 debugobsolete
112 debugp1copies
112 debugp1copies
113 debugp2copies
113 debugp2copies
114 debugpathcomplete
114 debugpathcomplete
115 debugpathcopies
115 debugpathcopies
116 debugpeer
116 debugpeer
117 debugpickmergetool
117 debugpickmergetool
118 debugpushkey
118 debugpushkey
119 debugpvec
119 debugpvec
120 debugrebuilddirstate
120 debugrebuilddirstate
121 debugrebuildfncache
121 debugrebuildfncache
122 debugrename
122 debugrename
123 debugrevlog
123 debugrevlog
124 debugrevlogindex
124 debugrevlogindex
125 debugrevspec
125 debugrevspec
126 debugserve
126 debugserve
127 debugsetparents
127 debugsetparents
128 debugsidedata
128 debugsidedata
129 debugssl
129 debugssl
130 debugsub
130 debugsub
131 debugsuccessorssets
131 debugsuccessorssets
132 debugtagscache
132 debugtagscache
133 debugtemplate
133 debugtemplate
134 debuguigetpass
134 debuguigetpass
135 debuguiprompt
135 debuguiprompt
136 debugupdatecaches
136 debugupdatecaches
137 debugupgraderepo
137 debugupgraderepo
138 debugwalk
138 debugwalk
139 debugwhyunstable
139 debugwhyunstable
140 debugwireargs
140 debugwireargs
141 debugwireproto
141 debugwireproto
142
142
143 Do not show the alias of a debug command if there are other candidates
143 Do not show the alias of a debug command if there are other candidates
144 (this should hide rawcommit)
144 (this should hide rawcommit)
145 $ hg debugcomplete r
145 $ hg debugcomplete r
146 recover
146 recover
147 remove
147 remove
148 rename
148 rename
149 resolve
149 resolve
150 revert
150 revert
151 rollback
151 rollback
152 root
152 root
153 Show the alias of a debug command if there are no other candidates
153 Show the alias of a debug command if there are no other candidates
154 $ hg debugcomplete rawc
154 $ hg debugcomplete rawc
155
155
156
156
157 Show the global options
157 Show the global options
158 $ hg debugcomplete --options | sort
158 $ hg debugcomplete --options | sort
159 --color
159 --color
160 --config
160 --config
161 --cwd
161 --cwd
162 --debug
162 --debug
163 --debugger
163 --debugger
164 --encoding
164 --encoding
165 --encodingmode
165 --encodingmode
166 --help
166 --help
167 --hidden
167 --hidden
168 --noninteractive
168 --noninteractive
169 --pager
169 --pager
170 --profile
170 --profile
171 --quiet
171 --quiet
172 --repository
172 --repository
173 --time
173 --time
174 --traceback
174 --traceback
175 --verbose
175 --verbose
176 --version
176 --version
177 -R
177 -R
178 -h
178 -h
179 -q
179 -q
180 -v
180 -v
181 -y
181 -y
182
182
183 Show the options for the "serve" command
183 Show the options for the "serve" command
184 $ hg debugcomplete --options serve | sort
184 $ hg debugcomplete --options serve | sort
185 --accesslog
185 --accesslog
186 --address
186 --address
187 --certificate
187 --certificate
188 --cmdserver
188 --cmdserver
189 --color
189 --color
190 --config
190 --config
191 --cwd
191 --cwd
192 --daemon
192 --daemon
193 --daemon-postexec
193 --daemon-postexec
194 --debug
194 --debug
195 --debugger
195 --debugger
196 --encoding
196 --encoding
197 --encodingmode
197 --encodingmode
198 --errorlog
198 --errorlog
199 --help
199 --help
200 --hidden
200 --hidden
201 --ipv6
201 --ipv6
202 --name
202 --name
203 --noninteractive
203 --noninteractive
204 --pager
204 --pager
205 --pid-file
205 --pid-file
206 --port
206 --port
207 --prefix
207 --prefix
208 --print-url
208 --print-url
209 --profile
209 --profile
210 --quiet
210 --quiet
211 --repository
211 --repository
212 --stdio
212 --stdio
213 --style
213 --style
214 --subrepos
214 --subrepos
215 --templates
215 --templates
216 --time
216 --time
217 --traceback
217 --traceback
218 --verbose
218 --verbose
219 --version
219 --version
220 --web-conf
220 --web-conf
221 -6
221 -6
222 -A
222 -A
223 -E
223 -E
224 -R
224 -R
225 -S
225 -S
226 -a
226 -a
227 -d
227 -d
228 -h
228 -h
229 -n
229 -n
230 -p
230 -p
231 -q
231 -q
232 -t
232 -t
233 -v
233 -v
234 -y
234 -y
235
235
236 Show an error if we use --options with an ambiguous abbreviation
236 Show an error if we use --options with an ambiguous abbreviation
237 $ hg debugcomplete --options s
237 $ hg debugcomplete --options s
238 hg: command 's' is ambiguous:
238 hg: command 's' is ambiguous:
239 serve shelve showconfig status summary
239 serve shelve showconfig status summary
240 [255]
240 [255]
241
241
242 Show all commands + options
242 Show all commands + options
243 $ hg debugcommands
243 $ hg debugcommands
244 abort: dry-run
244 abort: dry-run
245 add: include, exclude, subrepos, dry-run
245 add: include, exclude, subrepos, dry-run
246 addremove: similarity, subrepos, include, exclude, dry-run
246 addremove: similarity, subrepos, include, exclude, dry-run
247 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
247 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
248 archive: no-decode, prefix, rev, type, subrepos, include, exclude
248 archive: no-decode, prefix, rev, type, subrepos, include, exclude
249 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
249 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
250 bisect: reset, good, bad, skip, extend, command, noupdate
250 bisect: reset, good, bad, skip, extend, command, noupdate
251 bookmarks: force, rev, delete, rename, inactive, list, template
251 bookmarks: force, rev, delete, rename, inactive, list, template
252 branch: force, clean, rev
252 branch: force, clean, rev
253 branches: active, closed, rev, template
253 branches: active, closed, rev, template
254 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
254 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
255 cat: output, rev, decode, include, exclude, template
255 cat: output, rev, decode, include, exclude, template
256 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
256 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
257 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
257 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
258 config: untrusted, edit, local, global, template
258 config: untrusted, edit, local, global, template
259 continue: dry-run
259 continue: dry-run
260 copy: after, force, include, exclude, dry-run
260 copy: after, force, include, exclude, dry-run
261 debugancestor:
261 debugancestor:
262 debugapplystreamclonebundle:
262 debugapplystreamclonebundle:
263 debugbuilddag: mergeable-file, overwritten-file, new-file
263 debugbuilddag: mergeable-file, overwritten-file, new-file
264 debugbundle: all, part-type, spec
264 debugbundle: all, part-type, spec
265 debugcapabilities:
265 debugcapabilities:
266 debugcheckstate:
266 debugcheckstate:
267 debugcolor: style
267 debugcolor: style
268 debugcommands:
268 debugcommands:
269 debugcomplete: options
269 debugcomplete: options
270 debugcreatestreamclonebundle:
270 debugcreatestreamclonebundle:
271 debugdag: tags, branches, dots, spaces
271 debugdag: tags, branches, dots, spaces
272 debugdata: changelog, manifest, dir
272 debugdata: changelog, manifest, dir
273 debugdate: extended
273 debugdate: extended
274 debugdeltachain: changelog, manifest, dir, template
274 debugdeltachain: changelog, manifest, dir, template
275 debugdirstate: nodates, dates, datesort
275 debugdirstate: nodates, dates, datesort
276 debugdiscovery: old, nonheads, rev, seed, ssh, remotecmd, insecure
276 debugdiscovery: old, nonheads, rev, seed, ssh, remotecmd, insecure
277 debugdownload: output
277 debugdownload: output
278 debugextensions: template
278 debugextensions: template
279 debugfileset: rev, all-files, show-matcher, show-stage
279 debugfileset: rev, all-files, show-matcher, show-stage
280 debugformat: template
280 debugformat: template
281 debugfsinfo:
281 debugfsinfo:
282 debuggetbundle: head, common, type
282 debuggetbundle: head, common, type
283 debugignore:
283 debugignore:
284 debugindex: changelog, manifest, dir, template
284 debugindex: changelog, manifest, dir, template
285 debugindexdot: changelog, manifest, dir
285 debugindexdot: changelog, manifest, dir
286 debugindexstats:
286 debugindexstats:
287 debuginstall: template
287 debuginstall: template
288 debugknown:
288 debugknown:
289 debuglabelcomplete:
289 debuglabelcomplete:
290 debuglocks: force-lock, force-wlock, set-lock, set-wlock
290 debuglocks: force-lock, force-wlock, set-lock, set-wlock
291 debugmanifestfulltextcache: clear, add
291 debugmanifestfulltextcache: clear, add
292 debugmergestate:
292 debugmergestate:
293 debugnamecomplete:
293 debugnamecomplete:
294 debugnodemap: dump
294 debugnodemap: dump-new, dump-disk
295 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
295 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
296 debugp1copies: rev
296 debugp1copies: rev
297 debugp2copies: rev
297 debugp2copies: rev
298 debugpathcomplete: full, normal, added, removed
298 debugpathcomplete: full, normal, added, removed
299 debugpathcopies: include, exclude
299 debugpathcopies: include, exclude
300 debugpeer:
300 debugpeer:
301 debugpickmergetool: rev, changedelete, include, exclude, tool
301 debugpickmergetool: rev, changedelete, include, exclude, tool
302 debugpushkey:
302 debugpushkey:
303 debugpvec:
303 debugpvec:
304 debugrebuilddirstate: rev, minimal
304 debugrebuilddirstate: rev, minimal
305 debugrebuildfncache:
305 debugrebuildfncache:
306 debugrename: rev
306 debugrename: rev
307 debugrevlog: changelog, manifest, dir, dump
307 debugrevlog: changelog, manifest, dir, dump
308 debugrevlogindex: changelog, manifest, dir, format
308 debugrevlogindex: changelog, manifest, dir, format
309 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
309 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
310 debugserve: sshstdio, logiofd, logiofile
310 debugserve: sshstdio, logiofd, logiofile
311 debugsetparents:
311 debugsetparents:
312 debugsidedata: changelog, manifest, dir
312 debugsidedata: changelog, manifest, dir
313 debugssl:
313 debugssl:
314 debugsub: rev
314 debugsub: rev
315 debugsuccessorssets: closest
315 debugsuccessorssets: closest
316 debugtagscache:
316 debugtagscache:
317 debugtemplate: rev, define
317 debugtemplate: rev, define
318 debuguigetpass: prompt
318 debuguigetpass: prompt
319 debuguiprompt: prompt
319 debuguiprompt: prompt
320 debugupdatecaches:
320 debugupdatecaches:
321 debugupgraderepo: optimize, run, backup, changelog, manifest
321 debugupgraderepo: optimize, run, backup, changelog, manifest
322 debugwalk: include, exclude
322 debugwalk: include, exclude
323 debugwhyunstable:
323 debugwhyunstable:
324 debugwireargs: three, four, five, ssh, remotecmd, insecure
324 debugwireargs: three, four, five, ssh, remotecmd, insecure
325 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
325 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
326 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
326 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
327 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
327 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
328 files: rev, print0, include, exclude, template, subrepos
328 files: rev, print0, include, exclude, template, subrepos
329 forget: interactive, include, exclude, dry-run
329 forget: interactive, include, exclude, dry-run
330 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
330 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
331 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
331 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
332 heads: rev, topo, active, closed, style, template
332 heads: rev, topo, active, closed, style, template
333 help: extension, command, keyword, system
333 help: extension, command, keyword, system
334 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
334 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
335 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
335 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
336 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
336 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
337 init: ssh, remotecmd, insecure
337 init: ssh, remotecmd, insecure
338 locate: rev, print0, fullpath, include, exclude
338 locate: rev, print0, fullpath, include, exclude
339 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
339 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
340 manifest: rev, all, template
340 manifest: rev, all, template
341 merge: force, rev, preview, abort, tool
341 merge: force, rev, preview, abort, tool
342 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
342 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
343 parents: rev, style, template
343 parents: rev, style, template
344 paths: template
344 paths: template
345 phase: public, draft, secret, force, rev
345 phase: public, draft, secret, force, rev
346 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
346 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
347 push: force, rev, bookmark, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
347 push: force, rev, bookmark, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
348 recover: verify
348 recover: verify
349 remove: after, force, subrepos, include, exclude, dry-run
349 remove: after, force, subrepos, include, exclude, dry-run
350 rename: after, force, include, exclude, dry-run
350 rename: after, force, include, exclude, dry-run
351 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
351 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
352 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
352 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
353 rollback: dry-run, force
353 rollback: dry-run, force
354 root: template
354 root: template
355 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
355 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
356 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
356 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
357 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
357 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
358 summary: remote
358 summary: remote
359 tag: force, local, rev, remove, edit, message, date, user
359 tag: force, local, rev, remove, edit, message, date, user
360 tags: template
360 tags: template
361 tip: patch, git, style, template
361 tip: patch, git, style, template
362 unbundle: update
362 unbundle: update
363 unshelve: abort, continue, interactive, keep, name, tool, date
363 unshelve: abort, continue, interactive, keep, name, tool, date
364 update: clean, check, merge, date, rev, tool
364 update: clean, check, merge, date, rev, tool
365 verify: full
365 verify: full
366 version: template
366 version: template
367
367
368 $ hg init a
368 $ hg init a
369 $ cd a
369 $ cd a
370 $ echo fee > fee
370 $ echo fee > fee
371 $ hg ci -q -Amfee
371 $ hg ci -q -Amfee
372 $ hg tag fee
372 $ hg tag fee
373 $ mkdir fie
373 $ mkdir fie
374 $ echo dead > fie/dead
374 $ echo dead > fie/dead
375 $ echo live > fie/live
375 $ echo live > fie/live
376 $ hg bookmark fo
376 $ hg bookmark fo
377 $ hg branch -q fie
377 $ hg branch -q fie
378 $ hg ci -q -Amfie
378 $ hg ci -q -Amfie
379 $ echo fo > fo
379 $ echo fo > fo
380 $ hg branch -qf default
380 $ hg branch -qf default
381 $ hg ci -q -Amfo
381 $ hg ci -q -Amfo
382 $ echo Fum > Fum
382 $ echo Fum > Fum
383 $ hg ci -q -AmFum
383 $ hg ci -q -AmFum
384 $ hg bookmark Fum
384 $ hg bookmark Fum
385
385
386 Test debugpathcomplete
386 Test debugpathcomplete
387
387
388 $ hg debugpathcomplete f
388 $ hg debugpathcomplete f
389 fee
389 fee
390 fie
390 fie
391 fo
391 fo
392 $ hg debugpathcomplete -f f
392 $ hg debugpathcomplete -f f
393 fee
393 fee
394 fie/dead
394 fie/dead
395 fie/live
395 fie/live
396 fo
396 fo
397
397
398 $ hg rm Fum
398 $ hg rm Fum
399 $ hg debugpathcomplete -r F
399 $ hg debugpathcomplete -r F
400 Fum
400 Fum
401
401
402 Test debugnamecomplete
402 Test debugnamecomplete
403
403
404 $ hg debugnamecomplete
404 $ hg debugnamecomplete
405 Fum
405 Fum
406 default
406 default
407 fee
407 fee
408 fie
408 fie
409 fo
409 fo
410 tip
410 tip
411 $ hg debugnamecomplete f
411 $ hg debugnamecomplete f
412 fee
412 fee
413 fie
413 fie
414 fo
414 fo
415
415
416 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
416 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
417 used for completions in some shells.
417 used for completions in some shells.
418
418
419 $ hg debuglabelcomplete
419 $ hg debuglabelcomplete
420 Fum
420 Fum
421 default
421 default
422 fee
422 fee
423 fie
423 fie
424 fo
424 fo
425 tip
425 tip
426 $ hg debuglabelcomplete f
426 $ hg debuglabelcomplete f
427 fee
427 fee
428 fie
428 fie
429 fo
429 fo
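An aside, not part of the diff above: the completion tests exercise `hg debugpathcomplete` and `hg debugnamecomplete` (with `debuglabelcomplete` kept only as a deprecated alias), which shell completion scripts invoke to fetch candidates for the word being typed. A minimal sketch of that calling pattern, assuming only a Python 3.7+ interpreter and an `hg` binary on PATH (the helper name `hg_complete` is illustrative, not part of Mercurial):

    import subprocess

    def hg_complete(kind, prefix, repo="."):
        """Ask Mercurial for completion candidates, the way a shell helper would.

        kind is "namecomplete" (branches, bookmarks, tags) or "pathcomplete"
        (tracked files); prefix is whatever the user has typed so far.
        """
        out = subprocess.run(
            ["hg", "debug%s" % kind, prefix],
            cwd=repo,
            check=True,
            capture_output=True,
            text=True,
        ).stdout
        # one candidate per line, as shown in the test output above
        return out.splitlines()

    # In the repository built above, hg_complete("namecomplete", "f")
    # would return ["fee", "fie", "fo"].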
@@ -1,32 +1,32 b''
1 ===================================
1 ===================================
2 Test the persistent on-disk nodemap
2 Test the persistent on-disk nodemap
3 ===================================
3 ===================================
4
4
5
5
6 $ hg init test-repo
6 $ hg init test-repo
7 $ cd test-repo
7 $ cd test-repo
8 $ cat << EOF >> .hg/hgrc
8 $ cat << EOF >> .hg/hgrc
9 > [experimental]
9 > [experimental]
10 > exp-persistent-nodemap=yes
10 > exp-persistent-nodemap=yes
11 > EOF
11 > EOF
12 $ hg debugbuilddag .+5000
12 $ hg debugbuilddag .+5000
13 $ hg debugnodemap --dump | f --sha256 --size
13 $ hg debugnodemap --dump-new | f --sha256 --size
14 size=122880, sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7
14 size=122880, sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7
15 $ f --sha256 --bytes=256 --hexdump --size < .hg/store/00changelog.n
15 $ hg debugnodemap --dump-disk | f --sha256 --bytes=256 --hexdump --size
16 size=122880, sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7
16 size=122880, sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7
17 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
17 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
18 0010: ff ff ff ff ff ff ff ff ff ff fa c2 ff ff ff ff |................|
18 0010: ff ff ff ff ff ff ff ff ff ff fa c2 ff ff ff ff |................|
19 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
19 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
20 0030: ff ff ff ff ff ff ed b3 ff ff ff ff ff ff ff ff |................|
20 0030: ff ff ff ff ff ff ed b3 ff ff ff ff ff ff ff ff |................|
21 0040: ff ff ff ff ff ff ee 34 00 00 00 00 ff ff ff ff |.......4........|
21 0040: ff ff ff ff ff ff ee 34 00 00 00 00 ff ff ff ff |.......4........|
22 0050: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
22 0050: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
23 0060: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
23 0060: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
24 0070: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
24 0070: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
25 0080: ff ff ff ff ff ff f8 50 ff ff ff ff ff ff ff ff |.......P........|
25 0080: ff ff ff ff ff ff f8 50 ff ff ff ff ff ff ff ff |.......P........|
26 0090: ff ff ff ff ff ff ff ff ff ff ec c7 ff ff ff ff |................|
26 0090: ff ff ff ff ff ff ff ff ff ff ec c7 ff ff ff ff |................|
27 00a0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
27 00a0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
28 00b0: ff ff ff ff ff ff fa be ff ff f2 fc ff ff ff ff |................|
28 00b0: ff ff ff ff ff ff fa be ff ff f2 fc ff ff ff ff |................|
29 00c0: ff ff ff ff ff ff ef ea ff ff ff ff ff ff f9 17 |................|
29 00c0: ff ff ff ff ff ff ef ea ff ff ff ff ff ff f9 17 |................|
30 00d0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
30 00d0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
31 00e0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
31 00e0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
32 00f0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
32 00f0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
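Also as an aside: the nodemap test above now compares the freshly computed index data (`--dump-new`) against what the new read-from-disk code path returns (`--dump-disk`), checking that size and sha256 agree via the `f --sha256 --size` helper. A rough equivalent of that check, assuming a built `test-repo` and `hg` on PATH (the function `nodemap_digest` is a hypothetical name used only for this sketch):

    import hashlib
    import subprocess

    def nodemap_digest(repo, source):
        """Return (size, sha256-hex) of a nodemap dump.

        source is "--dump-new" (data regenerated in memory) or "--dump-disk"
        (raw bytes read back from the store).
        """
        data = subprocess.run(
            ["hg", "debugnodemap", source],
            cwd=repo,
            check=True,
            capture_output=True,
        ).stdout
        return len(data), hashlib.sha256(data).hexdigest()

    # The test expects both sources to yield identical bytes once the
    # persistent nodemap has been written:
    # assert nodemap_digest("test-repo", "--dump-new") == \
    #        nodemap_digest("test-repo", "--dump-disk")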