debuginstall: add entry about re2 Rust bindings when applicable...
Raphaël Gomès
r45019:c9897371 default
@@ -1,4503 +1,4510 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import glob
import operator
import os
import platform
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    bundlerepo,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    tags as tagsmod,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil,
    nodemap,
)

release = lockmod.release

command = registrar.command()


@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))


@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)


@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
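
# Editorial note, not part of debugcommands.py: a minimal illustration of the
# DAG syntax documented in the docstring above. Starting from an empty
# repository, the following builds two divergent branches and merges them
# (the repository and tag names are made up for the example):
#
#   $ hg init dagdemo && cd dagdemo
#   $ hg debugbuilddag '+2:base +3:tip1 <base +2 /tip1' --new-file
#
# "+2:base" creates a linear run of two nodes and tags the last one "base",
# "+3:tip1" stacks three more on top, "<base" moves the default parent back
# to the tagged node, "+2" grows a second branch from it, and "/tip1" merges
# the last created node with "tip1".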


def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))


def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()


def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))


def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))


def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)


@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)


@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b'  %s\n' % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b'  %s\n' % key)
            for v in values:
                ui.write(b'    %s\n' % v)


@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)


@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)


def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)


def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')


@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))


@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")


@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)


@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))


@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
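
# Editorial note, not part of debugcommands.py: the output of the command
# above can be templatized with the keywords listed in its docstring, for
# example to inspect manifest delta chains only:
#
#   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
#
# (-m comes from the shared debugrevlogopts, -T from formatteropts.)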


@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))


@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common)9d\n" % data)
    ui.writenoi18n(b"    also local heads:  %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    both:              %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-local)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b"  remote heads:        %(nb-remote)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    unknown:           %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )
1003
1003
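A minimal standalone sketch (illustrative values, not Mercurial API) of the formatting used for the summary above: the report lines are bytes templates interpolated with a dict via %-formatting (PEP 461), which is why both the format names and the `data` keys are bytes.

stats = {b'elapsed': 1.25, b'nb-common': 3}
assert b"elapsed time: %(elapsed)f seconds\n" % stats == b"elapsed time: 1.250000 seconds\n"
assert b"total common heads: %(nb-common)d\n" % stats == b"total common heads: 3\n"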
1004
1004
1005 _chunksize = 4 << 10
1005 _chunksize = 4 << 10
1006
1006
1007
1007
1008 @command(
1008 @command(
1009 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1009 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1010 )
1010 )
1011 def debugdownload(ui, repo, url, output=None, **opts):
1011 def debugdownload(ui, repo, url, output=None, **opts):
1012 """download a resource using Mercurial logic and config
1012 """download a resource using Mercurial logic and config
1013 """
1013 """
1014 fh = urlmod.open(ui, url, output)
1014 fh = urlmod.open(ui, url, output)
1015
1015
1016 dest = ui
1016 dest = ui
1017 if output:
1017 if output:
1018 dest = open(output, b"wb", _chunksize)
1018 dest = open(output, b"wb", _chunksize)
1019 try:
1019 try:
1020 data = fh.read(_chunksize)
1020 data = fh.read(_chunksize)
1021 while data:
1021 while data:
1022 dest.write(data)
1022 dest.write(data)
1023 data = fh.read(_chunksize)
1023 data = fh.read(_chunksize)
1024 finally:
1024 finally:
1025 if output:
1025 if output:
1026 dest.close()
1026 dest.close()
1027
1027
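A minimal standalone sketch (hypothetical helper name, not Mercurial API) of the download loop above: debugdownload streams the resource in fixed 4 KiB chunks (_chunksize = 4 << 10 == 4096) so memory use stays bounded regardless of the size of the download.

def copy_in_chunks(src, dst, chunksize=4 << 10):
    # read/write until read() returns an empty chunk, mirroring the loop above
    data = src.read(chunksize)
    while data:
        dst.write(data)
        data = src.read(chunksize)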
1028
1028
1029 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1029 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1030 def debugextensions(ui, repo, **opts):
1030 def debugextensions(ui, repo, **opts):
1031 '''show information about active extensions'''
1031 '''show information about active extensions'''
1032 opts = pycompat.byteskwargs(opts)
1032 opts = pycompat.byteskwargs(opts)
1033 exts = extensions.extensions(ui)
1033 exts = extensions.extensions(ui)
1034 hgver = util.version()
1034 hgver = util.version()
1035 fm = ui.formatter(b'debugextensions', opts)
1035 fm = ui.formatter(b'debugextensions', opts)
1036 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1036 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1037 isinternal = extensions.ismoduleinternal(extmod)
1037 isinternal = extensions.ismoduleinternal(extmod)
1038 extsource = None
1038 extsource = None
1039
1039
1040 if util.safehasattr(extmod, '__file__'):
1040 if util.safehasattr(extmod, '__file__'):
1041 extsource = pycompat.fsencode(extmod.__file__)
1041 extsource = pycompat.fsencode(extmod.__file__)
1042 elif getattr(sys, 'oxidized', False):
1042 elif getattr(sys, 'oxidized', False):
1043 extsource = pycompat.sysexecutable
1043 extsource = pycompat.sysexecutable
1044 if isinternal:
1044 if isinternal:
1045 exttestedwith = [] # never expose magic string to users
1045 exttestedwith = [] # never expose magic string to users
1046 else:
1046 else:
1047 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1047 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1048 extbuglink = getattr(extmod, 'buglink', None)
1048 extbuglink = getattr(extmod, 'buglink', None)
1049
1049
1050 fm.startitem()
1050 fm.startitem()
1051
1051
1052 if ui.quiet or ui.verbose:
1052 if ui.quiet or ui.verbose:
1053 fm.write(b'name', b'%s\n', extname)
1053 fm.write(b'name', b'%s\n', extname)
1054 else:
1054 else:
1055 fm.write(b'name', b'%s', extname)
1055 fm.write(b'name', b'%s', extname)
1056 if isinternal or hgver in exttestedwith:
1056 if isinternal or hgver in exttestedwith:
1057 fm.plain(b'\n')
1057 fm.plain(b'\n')
1058 elif not exttestedwith:
1058 elif not exttestedwith:
1059 fm.plain(_(b' (untested!)\n'))
1059 fm.plain(_(b' (untested!)\n'))
1060 else:
1060 else:
1061 lasttestedversion = exttestedwith[-1]
1061 lasttestedversion = exttestedwith[-1]
1062 fm.plain(b' (%s!)\n' % lasttestedversion)
1062 fm.plain(b' (%s!)\n' % lasttestedversion)
1063
1063
1064 fm.condwrite(
1064 fm.condwrite(
1065 ui.verbose and extsource,
1065 ui.verbose and extsource,
1066 b'source',
1066 b'source',
1067 _(b' location: %s\n'),
1067 _(b' location: %s\n'),
1068 extsource or b"",
1068 extsource or b"",
1069 )
1069 )
1070
1070
1071 if ui.verbose:
1071 if ui.verbose:
1072 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1072 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1073 fm.data(bundled=isinternal)
1073 fm.data(bundled=isinternal)
1074
1074
1075 fm.condwrite(
1075 fm.condwrite(
1076 ui.verbose and exttestedwith,
1076 ui.verbose and exttestedwith,
1077 b'testedwith',
1077 b'testedwith',
1078 _(b' tested with: %s\n'),
1078 _(b' tested with: %s\n'),
1079 fm.formatlist(exttestedwith, name=b'ver'),
1079 fm.formatlist(exttestedwith, name=b'ver'),
1080 )
1080 )
1081
1081
1082 fm.condwrite(
1082 fm.condwrite(
1083 ui.verbose and extbuglink,
1083 ui.verbose and extbuglink,
1084 b'buglink',
1084 b'buglink',
1085 _(b' bug reporting: %s\n'),
1085 _(b' bug reporting: %s\n'),
1086 extbuglink or b"",
1086 extbuglink or b"",
1087 )
1087 )
1088
1088
1089 fm.end()
1089 fm.end()
1090
1090
1091
1091
1092 @command(
1092 @command(
1093 b'debugfileset',
1093 b'debugfileset',
1094 [
1094 [
1095 (
1095 (
1096 b'r',
1096 b'r',
1097 b'rev',
1097 b'rev',
1098 b'',
1098 b'',
1099 _(b'apply the filespec on this revision'),
1099 _(b'apply the filespec on this revision'),
1100 _(b'REV'),
1100 _(b'REV'),
1101 ),
1101 ),
1102 (
1102 (
1103 b'',
1103 b'',
1104 b'all-files',
1104 b'all-files',
1105 False,
1105 False,
1106 _(b'test files from all revisions and working directory'),
1106 _(b'test files from all revisions and working directory'),
1107 ),
1107 ),
1108 (
1108 (
1109 b's',
1109 b's',
1110 b'show-matcher',
1110 b'show-matcher',
1111 None,
1111 None,
1112 _(b'print internal representation of matcher'),
1112 _(b'print internal representation of matcher'),
1113 ),
1113 ),
1114 (
1114 (
1115 b'p',
1115 b'p',
1116 b'show-stage',
1116 b'show-stage',
1117 [],
1117 [],
1118 _(b'print parsed tree at the given stage'),
1118 _(b'print parsed tree at the given stage'),
1119 _(b'NAME'),
1119 _(b'NAME'),
1120 ),
1120 ),
1121 ],
1121 ],
1122 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1122 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1123 )
1123 )
1124 def debugfileset(ui, repo, expr, **opts):
1124 def debugfileset(ui, repo, expr, **opts):
1125 '''parse and apply a fileset specification'''
1125 '''parse and apply a fileset specification'''
1126 from . import fileset
1126 from . import fileset
1127
1127
1128 fileset.symbols # force import of fileset so we have predicates to optimize
1128 fileset.symbols # force import of fileset so we have predicates to optimize
1129 opts = pycompat.byteskwargs(opts)
1129 opts = pycompat.byteskwargs(opts)
1130 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1130 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1131
1131
1132 stages = [
1132 stages = [
1133 (b'parsed', pycompat.identity),
1133 (b'parsed', pycompat.identity),
1134 (b'analyzed', filesetlang.analyze),
1134 (b'analyzed', filesetlang.analyze),
1135 (b'optimized', filesetlang.optimize),
1135 (b'optimized', filesetlang.optimize),
1136 ]
1136 ]
1137 stagenames = {n for n, f in stages}
1137 stagenames = {n for n, f in stages}
1138
1138
1139 showalways = set()
1139 showalways = set()
1140 if ui.verbose and not opts[b'show_stage']:
1140 if ui.verbose and not opts[b'show_stage']:
1141 # show parsed tree by --verbose (deprecated)
1141 # show parsed tree by --verbose (deprecated)
1142 showalways.add(b'parsed')
1142 showalways.add(b'parsed')
1143 if opts[b'show_stage'] == [b'all']:
1143 if opts[b'show_stage'] == [b'all']:
1144 showalways.update(stagenames)
1144 showalways.update(stagenames)
1145 else:
1145 else:
1146 for n in opts[b'show_stage']:
1146 for n in opts[b'show_stage']:
1147 if n not in stagenames:
1147 if n not in stagenames:
1148 raise error.Abort(_(b'invalid stage name: %s') % n)
1148 raise error.Abort(_(b'invalid stage name: %s') % n)
1149 showalways.update(opts[b'show_stage'])
1149 showalways.update(opts[b'show_stage'])
1150
1150
1151 tree = filesetlang.parse(expr)
1151 tree = filesetlang.parse(expr)
1152 for n, f in stages:
1152 for n, f in stages:
1153 tree = f(tree)
1153 tree = f(tree)
1154 if n in showalways:
1154 if n in showalways:
1155 if opts[b'show_stage'] or n != b'parsed':
1155 if opts[b'show_stage'] or n != b'parsed':
1156 ui.write(b"* %s:\n" % n)
1156 ui.write(b"* %s:\n" % n)
1157 ui.write(filesetlang.prettyformat(tree), b"\n")
1157 ui.write(filesetlang.prettyformat(tree), b"\n")
1158
1158
1159 files = set()
1159 files = set()
1160 if opts[b'all_files']:
1160 if opts[b'all_files']:
1161 for r in repo:
1161 for r in repo:
1162 c = repo[r]
1162 c = repo[r]
1163 files.update(c.files())
1163 files.update(c.files())
1164 files.update(c.substate)
1164 files.update(c.substate)
1165 if opts[b'all_files'] or ctx.rev() is None:
1165 if opts[b'all_files'] or ctx.rev() is None:
1166 wctx = repo[None]
1166 wctx = repo[None]
1167 files.update(
1167 files.update(
1168 repo.dirstate.walk(
1168 repo.dirstate.walk(
1169 scmutil.matchall(repo),
1169 scmutil.matchall(repo),
1170 subrepos=list(wctx.substate),
1170 subrepos=list(wctx.substate),
1171 unknown=True,
1171 unknown=True,
1172 ignored=True,
1172 ignored=True,
1173 )
1173 )
1174 )
1174 )
1175 files.update(wctx.substate)
1175 files.update(wctx.substate)
1176 else:
1176 else:
1177 files.update(ctx.files())
1177 files.update(ctx.files())
1178 files.update(ctx.substate)
1178 files.update(ctx.substate)
1179
1179
1180 m = ctx.matchfileset(repo.getcwd(), expr)
1180 m = ctx.matchfileset(repo.getcwd(), expr)
1181 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1181 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1182 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1182 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1183 for f in sorted(files):
1183 for f in sorted(files):
1184 if not m(f):
1184 if not m(f):
1185 continue
1185 continue
1186 ui.write(b"%s\n" % f)
1186 ui.write(b"%s\n" % f)
1187
1187
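A standalone sketch (generic names, not Mercurial API) of the staged evaluation used by debugfileset above: each (name, function) stage is applied in order, and the intermediate tree is printed only for the stages selected with --show-stage.

def run_stages(tree, stages, show):
    for name, fn in stages:
        tree = fn(tree)
        if name in show:
            print('* %s:' % name)
            print(tree)
    return tree

# e.g. run_stages(' x ', [('parsed', str.strip), ('analyzed', str.upper)], {'analyzed'})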
1188
1188
1189 @command(b'debugformat', [] + cmdutil.formatteropts)
1189 @command(b'debugformat', [] + cmdutil.formatteropts)
1190 def debugformat(ui, repo, **opts):
1190 def debugformat(ui, repo, **opts):
1191 """display format information about the current repository
1191 """display format information about the current repository
1192
1192
1193 Use --verbose to get extra information about current config value and
1193 Use --verbose to get extra information about current config value and
1194 Mercurial default."""
1194 Mercurial default."""
1195 opts = pycompat.byteskwargs(opts)
1195 opts = pycompat.byteskwargs(opts)
1196 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1196 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1197 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1197 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1198
1198
1199 def makeformatname(name):
1199 def makeformatname(name):
1200 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1200 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1201
1201
1202 fm = ui.formatter(b'debugformat', opts)
1202 fm = ui.formatter(b'debugformat', opts)
1203 if fm.isplain():
1203 if fm.isplain():
1204
1204
1205 def formatvalue(value):
1205 def formatvalue(value):
1206 if util.safehasattr(value, b'startswith'):
1206 if util.safehasattr(value, b'startswith'):
1207 return value
1207 return value
1208 if value:
1208 if value:
1209 return b'yes'
1209 return b'yes'
1210 else:
1210 else:
1211 return b'no'
1211 return b'no'
1212
1212
1213 else:
1213 else:
1214 formatvalue = pycompat.identity
1214 formatvalue = pycompat.identity
1215
1215
1216 fm.plain(b'format-variant')
1216 fm.plain(b'format-variant')
1217 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1217 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1218 fm.plain(b' repo')
1218 fm.plain(b' repo')
1219 if ui.verbose:
1219 if ui.verbose:
1220 fm.plain(b' config default')
1220 fm.plain(b' config default')
1221 fm.plain(b'\n')
1221 fm.plain(b'\n')
1222 for fv in upgrade.allformatvariant:
1222 for fv in upgrade.allformatvariant:
1223 fm.startitem()
1223 fm.startitem()
1224 repovalue = fv.fromrepo(repo)
1224 repovalue = fv.fromrepo(repo)
1225 configvalue = fv.fromconfig(repo)
1225 configvalue = fv.fromconfig(repo)
1226
1226
1227 if repovalue != configvalue:
1227 if repovalue != configvalue:
1228 namelabel = b'formatvariant.name.mismatchconfig'
1228 namelabel = b'formatvariant.name.mismatchconfig'
1229 repolabel = b'formatvariant.repo.mismatchconfig'
1229 repolabel = b'formatvariant.repo.mismatchconfig'
1230 elif repovalue != fv.default:
1230 elif repovalue != fv.default:
1231 namelabel = b'formatvariant.name.mismatchdefault'
1231 namelabel = b'formatvariant.name.mismatchdefault'
1232 repolabel = b'formatvariant.repo.mismatchdefault'
1232 repolabel = b'formatvariant.repo.mismatchdefault'
1233 else:
1233 else:
1234 namelabel = b'formatvariant.name.uptodate'
1234 namelabel = b'formatvariant.name.uptodate'
1235 repolabel = b'formatvariant.repo.uptodate'
1235 repolabel = b'formatvariant.repo.uptodate'
1236
1236
1237 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1237 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1238 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1238 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1239 if fv.default != configvalue:
1239 if fv.default != configvalue:
1240 configlabel = b'formatvariant.config.special'
1240 configlabel = b'formatvariant.config.special'
1241 else:
1241 else:
1242 configlabel = b'formatvariant.config.default'
1242 configlabel = b'formatvariant.config.default'
1243 fm.condwrite(
1243 fm.condwrite(
1244 ui.verbose,
1244 ui.verbose,
1245 b'config',
1245 b'config',
1246 b' %6s',
1246 b' %6s',
1247 formatvalue(configvalue),
1247 formatvalue(configvalue),
1248 label=configlabel,
1248 label=configlabel,
1249 )
1249 )
1250 fm.condwrite(
1250 fm.condwrite(
1251 ui.verbose,
1251 ui.verbose,
1252 b'default',
1252 b'default',
1253 b' %7s',
1253 b' %7s',
1254 formatvalue(fv.default),
1254 formatvalue(fv.default),
1255 label=b'formatvariant.default',
1255 label=b'formatvariant.default',
1256 )
1256 )
1257 fm.plain(b'\n')
1257 fm.plain(b'\n')
1258 fm.end()
1258 fm.end()
1259
1259
1260
1260
1261 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1261 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1262 def debugfsinfo(ui, path=b"."):
1262 def debugfsinfo(ui, path=b"."):
1263 """show information detected about current filesystem"""
1263 """show information detected about current filesystem"""
1264 ui.writenoi18n(b'path: %s\n' % path)
1264 ui.writenoi18n(b'path: %s\n' % path)
1265 ui.writenoi18n(
1265 ui.writenoi18n(
1266 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1266 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1267 )
1267 )
1268 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1268 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1269 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1269 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1270 ui.writenoi18n(
1270 ui.writenoi18n(
1271 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1271 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1272 )
1272 )
1273 ui.writenoi18n(
1273 ui.writenoi18n(
1274 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1274 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1275 )
1275 )
1276 casesensitive = b'(unknown)'
1276 casesensitive = b'(unknown)'
1277 try:
1277 try:
1278 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1278 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1279 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1279 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1280 except OSError:
1280 except OSError:
1281 pass
1281 pass
1282 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1282 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1283
1283
1284
1284
1285 @command(
1285 @command(
1286 b'debuggetbundle',
1286 b'debuggetbundle',
1287 [
1287 [
1288 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1288 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1289 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1289 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1290 (
1290 (
1291 b't',
1291 b't',
1292 b'type',
1292 b'type',
1293 b'bzip2',
1293 b'bzip2',
1294 _(b'bundle compression type to use'),
1294 _(b'bundle compression type to use'),
1295 _(b'TYPE'),
1295 _(b'TYPE'),
1296 ),
1296 ),
1297 ],
1297 ],
1298 _(b'REPO FILE [-H|-C ID]...'),
1298 _(b'REPO FILE [-H|-C ID]...'),
1299 norepo=True,
1299 norepo=True,
1300 )
1300 )
1301 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1301 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1302 """retrieves a bundle from a repo
1302 """retrieves a bundle from a repo
1303
1303
1304 Every ID must be a full-length hex node id string. Saves the bundle to the
1304 Every ID must be a full-length hex node id string. Saves the bundle to the
1305 given file.
1305 given file.
1306 """
1306 """
1307 opts = pycompat.byteskwargs(opts)
1307 opts = pycompat.byteskwargs(opts)
1308 repo = hg.peer(ui, opts, repopath)
1308 repo = hg.peer(ui, opts, repopath)
1309 if not repo.capable(b'getbundle'):
1309 if not repo.capable(b'getbundle'):
1310 raise error.Abort(b"getbundle() not supported by target repository")
1310 raise error.Abort(b"getbundle() not supported by target repository")
1311 args = {}
1311 args = {}
1312 if common:
1312 if common:
1313 args['common'] = [bin(s) for s in common]
1313 args['common'] = [bin(s) for s in common]
1314 if head:
1314 if head:
1315 args['heads'] = [bin(s) for s in head]
1315 args['heads'] = [bin(s) for s in head]
1316 # TODO: get desired bundlecaps from command line.
1316 # TODO: get desired bundlecaps from command line.
1317 args['bundlecaps'] = None
1317 args['bundlecaps'] = None
1318 bundle = repo.getbundle(b'debug', **args)
1318 bundle = repo.getbundle(b'debug', **args)
1319
1319
1320 bundletype = opts.get(b'type', b'bzip2').lower()
1320 bundletype = opts.get(b'type', b'bzip2').lower()
1321 btypes = {
1321 btypes = {
1322 b'none': b'HG10UN',
1322 b'none': b'HG10UN',
1323 b'bzip2': b'HG10BZ',
1323 b'bzip2': b'HG10BZ',
1324 b'gzip': b'HG10GZ',
1324 b'gzip': b'HG10GZ',
1325 b'bundle2': b'HG20',
1325 b'bundle2': b'HG20',
1326 }
1326 }
1327 bundletype = btypes.get(bundletype)
1327 bundletype = btypes.get(bundletype)
1328 if bundletype not in bundle2.bundletypes:
1328 if bundletype not in bundle2.bundletypes:
1329 raise error.Abort(_(b'unknown bundle type specified with --type'))
1329 raise error.Abort(_(b'unknown bundle type specified with --type'))
1330 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1330 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1331
1331
1332
1332
1333 @command(b'debugignore', [], b'[FILE]')
1333 @command(b'debugignore', [], b'[FILE]')
1334 def debugignore(ui, repo, *files, **opts):
1334 def debugignore(ui, repo, *files, **opts):
1335 """display the combined ignore pattern and information about ignored files
1335 """display the combined ignore pattern and information about ignored files
1336
1336
1337 With no argument display the combined ignore pattern.
1337 With no argument display the combined ignore pattern.
1338
1338
1339 Given space-separated file names, shows if the given file is ignored and
1339 Given space-separated file names, shows if the given file is ignored and
1340 if so, shows the ignore rule (file and line number) that matched it.
1340 if so, shows the ignore rule (file and line number) that matched it.
1341 """
1341 """
1342 ignore = repo.dirstate._ignore
1342 ignore = repo.dirstate._ignore
1343 if not files:
1343 if not files:
1344 # Show all the patterns
1344 # Show all the patterns
1345 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1345 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1346 else:
1346 else:
1347 m = scmutil.match(repo[None], pats=files)
1347 m = scmutil.match(repo[None], pats=files)
1348 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1348 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1349 for f in m.files():
1349 for f in m.files():
1350 nf = util.normpath(f)
1350 nf = util.normpath(f)
1351 ignored = None
1351 ignored = None
1352 ignoredata = None
1352 ignoredata = None
1353 if nf != b'.':
1353 if nf != b'.':
1354 if ignore(nf):
1354 if ignore(nf):
1355 ignored = nf
1355 ignored = nf
1356 ignoredata = repo.dirstate._ignorefileandline(nf)
1356 ignoredata = repo.dirstate._ignorefileandline(nf)
1357 else:
1357 else:
1358 for p in pathutil.finddirs(nf):
1358 for p in pathutil.finddirs(nf):
1359 if ignore(p):
1359 if ignore(p):
1360 ignored = p
1360 ignored = p
1361 ignoredata = repo.dirstate._ignorefileandline(p)
1361 ignoredata = repo.dirstate._ignorefileandline(p)
1362 break
1362 break
1363 if ignored:
1363 if ignored:
1364 if ignored == nf:
1364 if ignored == nf:
1365 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1365 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1366 else:
1366 else:
1367 ui.write(
1367 ui.write(
1368 _(
1368 _(
1369 b"%s is ignored because of "
1369 b"%s is ignored because of "
1370 b"containing directory %s\n"
1370 b"containing directory %s\n"
1371 )
1371 )
1372 % (uipathfn(f), ignored)
1372 % (uipathfn(f), ignored)
1373 )
1373 )
1374 ignorefile, lineno, line = ignoredata
1374 ignorefile, lineno, line = ignoredata
1375 ui.write(
1375 ui.write(
1376 _(b"(ignore rule in %s, line %d: '%s')\n")
1376 _(b"(ignore rule in %s, line %d: '%s')\n")
1377 % (ignorefile, lineno, line)
1377 % (ignorefile, lineno, line)
1378 )
1378 )
1379 else:
1379 else:
1380 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1380 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1381
1381
1382
1382
1383 @command(
1383 @command(
1384 b'debugindex',
1384 b'debugindex',
1385 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1385 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1386 _(b'-c|-m|FILE'),
1386 _(b'-c|-m|FILE'),
1387 )
1387 )
1388 def debugindex(ui, repo, file_=None, **opts):
1388 def debugindex(ui, repo, file_=None, **opts):
1389 """dump index data for a storage primitive"""
1389 """dump index data for a storage primitive"""
1390 opts = pycompat.byteskwargs(opts)
1390 opts = pycompat.byteskwargs(opts)
1391 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1391 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1392
1392
1393 if ui.debugflag:
1393 if ui.debugflag:
1394 shortfn = hex
1394 shortfn = hex
1395 else:
1395 else:
1396 shortfn = short
1396 shortfn = short
1397
1397
1398 idlen = 12
1398 idlen = 12
1399 for i in store:
1399 for i in store:
1400 idlen = len(shortfn(store.node(i)))
1400 idlen = len(shortfn(store.node(i)))
1401 break
1401 break
1402
1402
1403 fm = ui.formatter(b'debugindex', opts)
1403 fm = ui.formatter(b'debugindex', opts)
1404 fm.plain(
1404 fm.plain(
1405 b' rev linkrev %s %s p2\n'
1405 b' rev linkrev %s %s p2\n'
1406 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1406 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1407 )
1407 )
1408
1408
1409 for rev in store:
1409 for rev in store:
1410 node = store.node(rev)
1410 node = store.node(rev)
1411 parents = store.parents(node)
1411 parents = store.parents(node)
1412
1412
1413 fm.startitem()
1413 fm.startitem()
1414 fm.write(b'rev', b'%6d ', rev)
1414 fm.write(b'rev', b'%6d ', rev)
1415 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1415 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1416 fm.write(b'node', b'%s ', shortfn(node))
1416 fm.write(b'node', b'%s ', shortfn(node))
1417 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1417 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1418 fm.write(b'p2', b'%s', shortfn(parents[1]))
1418 fm.write(b'p2', b'%s', shortfn(parents[1]))
1419 fm.plain(b'\n')
1419 fm.plain(b'\n')
1420
1420
1421 fm.end()
1421 fm.end()
1422
1422
1423
1423
1424 @command(
1424 @command(
1425 b'debugindexdot',
1425 b'debugindexdot',
1426 cmdutil.debugrevlogopts,
1426 cmdutil.debugrevlogopts,
1427 _(b'-c|-m|FILE'),
1427 _(b'-c|-m|FILE'),
1428 optionalrepo=True,
1428 optionalrepo=True,
1429 )
1429 )
1430 def debugindexdot(ui, repo, file_=None, **opts):
1430 def debugindexdot(ui, repo, file_=None, **opts):
1431 """dump an index DAG as a graphviz dot file"""
1431 """dump an index DAG as a graphviz dot file"""
1432 opts = pycompat.byteskwargs(opts)
1432 opts = pycompat.byteskwargs(opts)
1433 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1433 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1434 ui.writenoi18n(b"digraph G {\n")
1434 ui.writenoi18n(b"digraph G {\n")
1435 for i in r:
1435 for i in r:
1436 node = r.node(i)
1436 node = r.node(i)
1437 pp = r.parents(node)
1437 pp = r.parents(node)
1438 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1438 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1439 if pp[1] != nullid:
1439 if pp[1] != nullid:
1440 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1440 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1441 ui.write(b"}\n")
1441 ui.write(b"}\n")
1442
1442
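A usage sketch for the output above (assumes the standard Graphviz `dot` binary is installed; file names are illustrative): save the digraph and render it, e.g. after running `hg debugindexdot -c > dag.dot`.

import subprocess
subprocess.run(['dot', '-Tpng', 'dag.dot', '-o', 'dag.png'], check=True)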
1443
1443
1444 @command(b'debugindexstats', [])
1444 @command(b'debugindexstats', [])
1445 def debugindexstats(ui, repo):
1445 def debugindexstats(ui, repo):
1446 """show stats related to the changelog index"""
1446 """show stats related to the changelog index"""
1447 repo.changelog.shortest(nullid, 1)
1447 repo.changelog.shortest(nullid, 1)
1448 index = repo.changelog.index
1448 index = repo.changelog.index
1449 if not util.safehasattr(index, b'stats'):
1449 if not util.safehasattr(index, b'stats'):
1450 raise error.Abort(_(b'debugindexstats only works with native code'))
1450 raise error.Abort(_(b'debugindexstats only works with native code'))
1451 for k, v in sorted(index.stats().items()):
1451 for k, v in sorted(index.stats().items()):
1452 ui.write(b'%s: %d\n' % (k, v))
1452 ui.write(b'%s: %d\n' % (k, v))
1453
1453
1454
1454
1455 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1455 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1456 def debuginstall(ui, **opts):
1456 def debuginstall(ui, **opts):
1457 '''test Mercurial installation
1457 '''test Mercurial installation
1458
1458
1459 Returns 0 on success.
1459 Returns 0 on success.
1460 '''
1460 '''
1461 opts = pycompat.byteskwargs(opts)
1461 opts = pycompat.byteskwargs(opts)
1462
1462
1463 problems = 0
1463 problems = 0
1464
1464
1465 fm = ui.formatter(b'debuginstall', opts)
1465 fm = ui.formatter(b'debuginstall', opts)
1466 fm.startitem()
1466 fm.startitem()
1467
1467
1468 # encoding
1468 # encoding
1469 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1469 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1470 err = None
1470 err = None
1471 try:
1471 try:
1472 codecs.lookup(pycompat.sysstr(encoding.encoding))
1472 codecs.lookup(pycompat.sysstr(encoding.encoding))
1473 except LookupError as inst:
1473 except LookupError as inst:
1474 err = stringutil.forcebytestr(inst)
1474 err = stringutil.forcebytestr(inst)
1475 problems += 1
1475 problems += 1
1476 fm.condwrite(
1476 fm.condwrite(
1477 err,
1477 err,
1478 b'encodingerror',
1478 b'encodingerror',
1479 _(b" %s\n (check that your locale is properly set)\n"),
1479 _(b" %s\n (check that your locale is properly set)\n"),
1480 err,
1480 err,
1481 )
1481 )
1482
1482
1483 # Python
1483 # Python
1484 pythonlib = None
1484 pythonlib = None
1485 if util.safehasattr(os, '__file__'):
1485 if util.safehasattr(os, '__file__'):
1486 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1486 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1487 elif getattr(sys, 'oxidized', False):
1487 elif getattr(sys, 'oxidized', False):
1488 pythonlib = pycompat.sysexecutable
1488 pythonlib = pycompat.sysexecutable
1489
1489
1490 fm.write(
1490 fm.write(
1491 b'pythonexe',
1491 b'pythonexe',
1492 _(b"checking Python executable (%s)\n"),
1492 _(b"checking Python executable (%s)\n"),
1493 pycompat.sysexecutable or _(b"unknown"),
1493 pycompat.sysexecutable or _(b"unknown"),
1494 )
1494 )
1495 fm.write(
1495 fm.write(
1496 b'pythonimplementation',
1496 b'pythonimplementation',
1497 _(b"checking Python implementation (%s)\n"),
1497 _(b"checking Python implementation (%s)\n"),
1498 pycompat.sysbytes(platform.python_implementation()),
1498 pycompat.sysbytes(platform.python_implementation()),
1499 )
1499 )
1500 fm.write(
1500 fm.write(
1501 b'pythonver',
1501 b'pythonver',
1502 _(b"checking Python version (%s)\n"),
1502 _(b"checking Python version (%s)\n"),
1503 (b"%d.%d.%d" % sys.version_info[:3]),
1503 (b"%d.%d.%d" % sys.version_info[:3]),
1504 )
1504 )
1505 fm.write(
1505 fm.write(
1506 b'pythonlib',
1506 b'pythonlib',
1507 _(b"checking Python lib (%s)...\n"),
1507 _(b"checking Python lib (%s)...\n"),
1508 pythonlib or _(b"unknown"),
1508 pythonlib or _(b"unknown"),
1509 )
1509 )
1510
1510
1511 try:
1511 try:
1512 from . import rustext
1512 from . import rustext
1513
1513
1514 rustext.__doc__ # trigger lazy import
1514 rustext.__doc__ # trigger lazy import
1515 except ImportError:
1515 except ImportError:
1516 rustext = None
1516 rustext = None
1517
1517
1518 security = set(sslutil.supportedprotocols)
1518 security = set(sslutil.supportedprotocols)
1519 if sslutil.hassni:
1519 if sslutil.hassni:
1520 security.add(b'sni')
1520 security.add(b'sni')
1521
1521
1522 fm.write(
1522 fm.write(
1523 b'pythonsecurity',
1523 b'pythonsecurity',
1524 _(b"checking Python security support (%s)\n"),
1524 _(b"checking Python security support (%s)\n"),
1525 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1525 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1526 )
1526 )
1527
1527
1528 # These are warnings, not errors. So don't increment problem count. This
1528 # These are warnings, not errors. So don't increment problem count. This
1529 # may change in the future.
1529 # may change in the future.
1530 if b'tls1.2' not in security:
1530 if b'tls1.2' not in security:
1531 fm.plain(
1531 fm.plain(
1532 _(
1532 _(
1533 b' TLS 1.2 not supported by Python install; '
1533 b' TLS 1.2 not supported by Python install; '
1534 b'network connections lack modern security\n'
1534 b'network connections lack modern security\n'
1535 )
1535 )
1536 )
1536 )
1537 if b'sni' not in security:
1537 if b'sni' not in security:
1538 fm.plain(
1538 fm.plain(
1539 _(
1539 _(
1540 b' SNI not supported by Python install; may have '
1540 b' SNI not supported by Python install; may have '
1541 b'connectivity issues with some servers\n'
1541 b'connectivity issues with some servers\n'
1542 )
1542 )
1543 )
1543 )
1544
1544
1545 fm.plain(
1545 fm.plain(
1546 _(
1546 _(
1547 b"checking Rust extensions (%s)\n"
1547 b"checking Rust extensions (%s)\n"
1548 % (b'missing' if rustext is None else b'installed')
1548 % (b'missing' if rustext is None else b'installed')
1549 ),
1549 ),
1550 )
1550 )
1551
1551
1552 # TODO print CA cert info
1552 # TODO print CA cert info
1553
1553
1554 # hg version
1554 # hg version
1555 hgver = util.version()
1555 hgver = util.version()
1556 fm.write(
1556 fm.write(
1557 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1557 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1558 )
1558 )
1559 fm.write(
1559 fm.write(
1560 b'hgverextra',
1560 b'hgverextra',
1561 _(b"checking Mercurial custom build (%s)\n"),
1561 _(b"checking Mercurial custom build (%s)\n"),
1562 b'+'.join(hgver.split(b'+')[1:]),
1562 b'+'.join(hgver.split(b'+')[1:]),
1563 )
1563 )
1564
1564
1565 # compiled modules
1565 # compiled modules
1566 hgmodules = None
1566 hgmodules = None
1567 if util.safehasattr(sys.modules[__name__], '__file__'):
1567 if util.safehasattr(sys.modules[__name__], '__file__'):
1568 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1568 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1569 elif getattr(sys, 'oxidized', False):
1569 elif getattr(sys, 'oxidized', False):
1570 hgmodules = pycompat.sysexecutable
1570 hgmodules = pycompat.sysexecutable
1571
1571
1572 fm.write(
1572 fm.write(
1573 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1573 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1574 )
1574 )
1575 fm.write(
1575 fm.write(
1576 b'hgmodules',
1576 b'hgmodules',
1577 _(b"checking installed modules (%s)...\n"),
1577 _(b"checking installed modules (%s)...\n"),
1578 hgmodules or _(b"unknown"),
1578 hgmodules or _(b"unknown"),
1579 )
1579 )
1580
1580
1581 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1581 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1582 rustext = rustandc # for now, that's the only case
1582 rustext = rustandc # for now, that's the only case
1583 cext = policy.policy in (b'c', b'allow') or rustandc
1583 cext = policy.policy in (b'c', b'allow') or rustandc
1584 nopure = cext or rustext
1584 nopure = cext or rustext
1585 if nopure:
1585 if nopure:
1586 err = None
1586 err = None
1587 try:
1587 try:
1588 if cext:
1588 if cext:
1589 from .cext import ( # pytype: disable=import-error
1589 from .cext import ( # pytype: disable=import-error
1590 base85,
1590 base85,
1591 bdiff,
1591 bdiff,
1592 mpatch,
1592 mpatch,
1593 osutil,
1593 osutil,
1594 )
1594 )
1595
1595
1596 # quiet pyflakes
1596 # quiet pyflakes
1597 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1597 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1598 if rustext:
1598 if rustext:
1599 from .rustext import ( # pytype: disable=import-error
1599 from .rustext import ( # pytype: disable=import-error
1600 ancestor,
1600 ancestor,
1601 dirstate,
1601 dirstate,
1602 )
1602 )
1603
1603
1604 dir(ancestor), dir(dirstate) # quiet pyflakes
1604 dir(ancestor), dir(dirstate) # quiet pyflakes
1605 except Exception as inst:
1605 except Exception as inst:
1606 err = stringutil.forcebytestr(inst)
1606 err = stringutil.forcebytestr(inst)
1607 problems += 1
1607 problems += 1
1608 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1608 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1609
1609
1610 compengines = util.compengines._engines.values()
1610 compengines = util.compengines._engines.values()
1611 fm.write(
1611 fm.write(
1612 b'compengines',
1612 b'compengines',
1613 _(b'checking registered compression engines (%s)\n'),
1613 _(b'checking registered compression engines (%s)\n'),
1614 fm.formatlist(
1614 fm.formatlist(
1615 sorted(e.name() for e in compengines),
1615 sorted(e.name() for e in compengines),
1616 name=b'compengine',
1616 name=b'compengine',
1617 fmt=b'%s',
1617 fmt=b'%s',
1618 sep=b', ',
1618 sep=b', ',
1619 ),
1619 ),
1620 )
1620 )
1621 fm.write(
1621 fm.write(
1622 b'compenginesavail',
1622 b'compenginesavail',
1623 _(b'checking available compression engines (%s)\n'),
1623 _(b'checking available compression engines (%s)\n'),
1624 fm.formatlist(
1624 fm.formatlist(
1625 sorted(e.name() for e in compengines if e.available()),
1625 sorted(e.name() for e in compengines if e.available()),
1626 name=b'compengine',
1626 name=b'compengine',
1627 fmt=b'%s',
1627 fmt=b'%s',
1628 sep=b', ',
1628 sep=b', ',
1629 ),
1629 ),
1630 )
1630 )
1631 wirecompengines = compression.compengines.supportedwireengines(
1631 wirecompengines = compression.compengines.supportedwireengines(
1632 compression.SERVERROLE
1632 compression.SERVERROLE
1633 )
1633 )
1634 fm.write(
1634 fm.write(
1635 b'compenginesserver',
1635 b'compenginesserver',
1636 _(
1636 _(
1637 b'checking available compression engines '
1637 b'checking available compression engines '
1638 b'for wire protocol (%s)\n'
1638 b'for wire protocol (%s)\n'
1639 ),
1639 ),
1640 fm.formatlist(
1640 fm.formatlist(
1641 [e.name() for e in wirecompengines if e.wireprotosupport()],
1641 [e.name() for e in wirecompengines if e.wireprotosupport()],
1642 name=b'compengine',
1642 name=b'compengine',
1643 fmt=b'%s',
1643 fmt=b'%s',
1644 sep=b', ',
1644 sep=b', ',
1645 ),
1645 ),
1646 )
1646 )
1647 re2 = b'missing'
1647 re2 = b'missing'
1648 if util._re2:
1648 if util._re2:
1649 re2 = b'available'
1649 re2 = b'available'
1650 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1650 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1651 fm.data(re2=bool(util._re2))
1651 fm.data(re2=bool(util._re2))
1652
1652
1653 rust_debug_mod = policy.importrust("debug")
1654 if rust_debug_mod is not None:
1655 re2_rust = b'installed' if rust_debug_mod.re2_installed else b'missing'
1656
1657 msg = b'checking "re2" regexp engine Rust bindings (%s)\n'
1658 fm.plain(_(msg % re2_rust))
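# (editor's note) policy.importrust() is expected to return None when the Rust
# extensions are not in use, so the extra "re2 Rust bindings" line above is only
# emitted for Rust-enabled builds; the plain "re2" availability line is printed
# either way.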
1659
1653 # templates
1660 # templates
1654 p = templater.templatepaths()
1661 p = templater.templatepaths()
1655 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1662 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1656 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1663 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1657 if p:
1664 if p:
1658 m = templater.templatepath(b"map-cmdline.default")
1665 m = templater.templatepath(b"map-cmdline.default")
1659 if m:
1666 if m:
1660 # template found, check if it is working
1667 # template found, check if it is working
1661 err = None
1668 err = None
1662 try:
1669 try:
1663 templater.templater.frommapfile(m)
1670 templater.templater.frommapfile(m)
1664 except Exception as inst:
1671 except Exception as inst:
1665 err = stringutil.forcebytestr(inst)
1672 err = stringutil.forcebytestr(inst)
1666 p = None
1673 p = None
1667 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1674 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1668 else:
1675 else:
1669 p = None
1676 p = None
1670 fm.condwrite(
1677 fm.condwrite(
1671 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1678 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1672 )
1679 )
1673 fm.condwrite(
1680 fm.condwrite(
1674 not m,
1681 not m,
1675 b'defaulttemplatenotfound',
1682 b'defaulttemplatenotfound',
1676 _(b" template '%s' not found\n"),
1683 _(b" template '%s' not found\n"),
1677 b"default",
1684 b"default",
1678 )
1685 )
1679 if not p:
1686 if not p:
1680 problems += 1
1687 problems += 1
1681 fm.condwrite(
1688 fm.condwrite(
1682 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1689 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1683 )
1690 )
1684
1691
1685 # editor
1692 # editor
1686 editor = ui.geteditor()
1693 editor = ui.geteditor()
1687 editor = util.expandpath(editor)
1694 editor = util.expandpath(editor)
1688 editorbin = procutil.shellsplit(editor)[0]
1695 editorbin = procutil.shellsplit(editor)[0]
1689 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1696 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1690 cmdpath = procutil.findexe(editorbin)
1697 cmdpath = procutil.findexe(editorbin)
1691 fm.condwrite(
1698 fm.condwrite(
1692 not cmdpath and editor == b'vi',
1699 not cmdpath and editor == b'vi',
1693 b'vinotfound',
1700 b'vinotfound',
1694 _(
1701 _(
1695 b" No commit editor set and can't find %s in PATH\n"
1702 b" No commit editor set and can't find %s in PATH\n"
1696 b" (specify a commit editor in your configuration"
1703 b" (specify a commit editor in your configuration"
1697 b" file)\n"
1704 b" file)\n"
1698 ),
1705 ),
1699 not cmdpath and editor == b'vi' and editorbin,
1706 not cmdpath and editor == b'vi' and editorbin,
1700 )
1707 )
1701 fm.condwrite(
1708 fm.condwrite(
1702 not cmdpath and editor != b'vi',
1709 not cmdpath and editor != b'vi',
1703 b'editornotfound',
1710 b'editornotfound',
1704 _(
1711 _(
1705 b" Can't find editor '%s' in PATH\n"
1712 b" Can't find editor '%s' in PATH\n"
1706 b" (specify a commit editor in your configuration"
1713 b" (specify a commit editor in your configuration"
1707 b" file)\n"
1714 b" file)\n"
1708 ),
1715 ),
1709 not cmdpath and editorbin,
1716 not cmdpath and editorbin,
1710 )
1717 )
1711 if not cmdpath and editor != b'vi':
1718 if not cmdpath and editor != b'vi':
1712 problems += 1
1719 problems += 1
1713
1720
1714 # check username
1721 # check username
1715 username = None
1722 username = None
1716 err = None
1723 err = None
1717 try:
1724 try:
1718 username = ui.username()
1725 username = ui.username()
1719 except error.Abort as e:
1726 except error.Abort as e:
1720 err = stringutil.forcebytestr(e)
1727 err = stringutil.forcebytestr(e)
1721 problems += 1
1728 problems += 1
1722
1729
1723 fm.condwrite(
1730 fm.condwrite(
1724 username, b'username', _(b"checking username (%s)\n"), username
1731 username, b'username', _(b"checking username (%s)\n"), username
1725 )
1732 )
1726 fm.condwrite(
1733 fm.condwrite(
1727 err,
1734 err,
1728 b'usernameerror',
1735 b'usernameerror',
1729 _(
1736 _(
1730 b"checking username...\n %s\n"
1737 b"checking username...\n %s\n"
1731 b" (specify a username in your configuration file)\n"
1738 b" (specify a username in your configuration file)\n"
1732 ),
1739 ),
1733 err,
1740 err,
1734 )
1741 )
1735
1742
1736 for name, mod in extensions.extensions():
1743 for name, mod in extensions.extensions():
1737 handler = getattr(mod, 'debuginstall', None)
1744 handler = getattr(mod, 'debuginstall', None)
1738 if handler is not None:
1745 if handler is not None:
1739 problems += handler(ui, fm)
1746 problems += handler(ui, fm)
1740
1747
1741 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1748 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1742 if not problems:
1749 if not problems:
1743 fm.data(problems=problems)
1750 fm.data(problems=problems)
1744 fm.condwrite(
1751 fm.condwrite(
1745 problems,
1752 problems,
1746 b'problems',
1753 b'problems',
1747 _(b"%d problems detected, please check your install!\n"),
1754 _(b"%d problems detected, please check your install!\n"),
1748 problems,
1755 problems,
1749 )
1756 )
1750 fm.end()
1757 fm.end()
1751
1758
1752 return problems
1759 return problems
1753
1760
1754
1761
1755 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1762 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1756 def debugknown(ui, repopath, *ids, **opts):
1763 def debugknown(ui, repopath, *ids, **opts):
1757 """test whether node ids are known to a repo
1764 """test whether node ids are known to a repo
1758
1765
1759 Every ID must be a full-length hex node id string. Returns a list of 0s
1766 Every ID must be a full-length hex node id string. Returns a list of 0s
1760 and 1s indicating unknown/known.
1767 and 1s indicating unknown/known.
1761 """
1768 """
1762 opts = pycompat.byteskwargs(opts)
1769 opts = pycompat.byteskwargs(opts)
1763 repo = hg.peer(ui, opts, repopath)
1770 repo = hg.peer(ui, opts, repopath)
1764 if not repo.capable(b'known'):
1771 if not repo.capable(b'known'):
1765 raise error.Abort(b"known() not supported by target repository")
1772 raise error.Abort(b"known() not supported by target repository")
1766 flags = repo.known([bin(s) for s in ids])
1773 flags = repo.known([bin(s) for s in ids])
1767 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1774 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1768
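A standalone sketch of the output format above: debugknown prints one "0"/"1" character per queried node, in the order the IDs were given.

flags = [True, False, True]
assert b"".join([f and b"1" or b"0" for f in flags]) == b"101"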
1775
1769
1776
1770 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1777 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1771 def debuglabelcomplete(ui, repo, *args):
1778 def debuglabelcomplete(ui, repo, *args):
1772 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1779 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1773 debugnamecomplete(ui, repo, *args)
1780 debugnamecomplete(ui, repo, *args)
1774
1781
1775
1782
1776 @command(
1783 @command(
1777 b'debuglocks',
1784 b'debuglocks',
1778 [
1785 [
1779 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1786 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1780 (
1787 (
1781 b'W',
1788 b'W',
1782 b'force-wlock',
1789 b'force-wlock',
1783 None,
1790 None,
1784 _(b'free the working state lock (DANGEROUS)'),
1791 _(b'free the working state lock (DANGEROUS)'),
1785 ),
1792 ),
1786 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1793 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1787 (
1794 (
1788 b'S',
1795 b'S',
1789 b'set-wlock',
1796 b'set-wlock',
1790 None,
1797 None,
1791 _(b'set the working state lock until stopped'),
1798 _(b'set the working state lock until stopped'),
1792 ),
1799 ),
1793 ],
1800 ],
1794 _(b'[OPTION]...'),
1801 _(b'[OPTION]...'),
1795 )
1802 )
1796 def debuglocks(ui, repo, **opts):
1803 def debuglocks(ui, repo, **opts):
1797 """show or modify state of locks
1804 """show or modify state of locks
1798
1805
1799 By default, this command will show which locks are held. This
1806 By default, this command will show which locks are held. This
1800 includes the user and process holding the lock, the amount of time
1807 includes the user and process holding the lock, the amount of time
1801 the lock has been held, and the machine name where the process is
1808 the lock has been held, and the machine name where the process is
1802 running if it's not local.
1809 running if it's not local.
1803
1810
1804 Locks protect the integrity of Mercurial's data, so should be
1811 Locks protect the integrity of Mercurial's data, so should be
1805 treated with care. System crashes or other interruptions may cause
1812 treated with care. System crashes or other interruptions may cause
1806 locks to not be properly released, though Mercurial will usually
1813 locks to not be properly released, though Mercurial will usually
1807 detect and remove such stale locks automatically.
1814 detect and remove such stale locks automatically.
1808
1815
1809 However, detecting stale locks may not always be possible (for
1816 However, detecting stale locks may not always be possible (for
1810 instance, on a shared filesystem). Removing locks may also be
1817 instance, on a shared filesystem). Removing locks may also be
1811 blocked by filesystem permissions.
1818 blocked by filesystem permissions.
1812
1819
1813 Setting a lock will prevent other commands from changing the data.
1820 Setting a lock will prevent other commands from changing the data.
1814 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1821 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1815 The set locks are removed when the command exits.
1822 The set locks are removed when the command exits.
1816
1823
1817 Returns 0 if no locks are held.
1824 Returns 0 if no locks are held.
1818
1825
1819 """
1826 """
1820
1827
1821 if opts.get('force_lock'):
1828 if opts.get('force_lock'):
1822 repo.svfs.unlink(b'lock')
1829 repo.svfs.unlink(b'lock')
1823 if opts.get('force_wlock'):
1830 if opts.get('force_wlock'):
1824 repo.vfs.unlink(b'wlock')
1831 repo.vfs.unlink(b'wlock')
1825 if opts.get('force_lock') or opts.get('force_wlock'):
1832 if opts.get('force_lock') or opts.get('force_wlock'):
1826 return 0
1833 return 0
1827
1834
1828 locks = []
1835 locks = []
1829 try:
1836 try:
1830 if opts.get('set_wlock'):
1837 if opts.get('set_wlock'):
1831 try:
1838 try:
1832 locks.append(repo.wlock(False))
1839 locks.append(repo.wlock(False))
1833 except error.LockHeld:
1840 except error.LockHeld:
1834 raise error.Abort(_(b'wlock is already held'))
1841 raise error.Abort(_(b'wlock is already held'))
1835 if opts.get('set_lock'):
1842 if opts.get('set_lock'):
1836 try:
1843 try:
1837 locks.append(repo.lock(False))
1844 locks.append(repo.lock(False))
1838 except error.LockHeld:
1845 except error.LockHeld:
1839 raise error.Abort(_(b'lock is already held'))
1846 raise error.Abort(_(b'lock is already held'))
1840 if len(locks):
1847 if len(locks):
1841 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1848 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1842 return 0
1849 return 0
1843 finally:
1850 finally:
1844 release(*locks)
1851 release(*locks)
1845
1852
1846 now = time.time()
1853 now = time.time()
1847 held = 0
1854 held = 0
1848
1855
1849 def report(vfs, name, method):
1856 def report(vfs, name, method):
1850 # this causes stale locks to get reaped for more accurate reporting
1857 # this causes stale locks to get reaped for more accurate reporting
1851 try:
1858 try:
1852 l = method(False)
1859 l = method(False)
1853 except error.LockHeld:
1860 except error.LockHeld:
1854 l = None
1861 l = None
1855
1862
1856 if l:
1863 if l:
1857 l.release()
1864 l.release()
1858 else:
1865 else:
1859 try:
1866 try:
1860 st = vfs.lstat(name)
1867 st = vfs.lstat(name)
1861 age = now - st[stat.ST_MTIME]
1868 age = now - st[stat.ST_MTIME]
1862 user = util.username(st.st_uid)
1869 user = util.username(st.st_uid)
1863 locker = vfs.readlock(name)
1870 locker = vfs.readlock(name)
1864 if b":" in locker:
1871 if b":" in locker:
1865 host, pid = locker.split(b':')
1872 host, pid = locker.split(b':')
1866 if host == socket.gethostname():
1873 if host == socket.gethostname():
1867 locker = b'user %s, process %s' % (user or b'None', pid)
1874 locker = b'user %s, process %s' % (user or b'None', pid)
1868 else:
1875 else:
1869 locker = b'user %s, process %s, host %s' % (
1876 locker = b'user %s, process %s, host %s' % (
1870 user or b'None',
1877 user or b'None',
1871 pid,
1878 pid,
1872 host,
1879 host,
1873 )
1880 )
1874 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1881 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1875 return 1
1882 return 1
1876 except OSError as e:
1883 except OSError as e:
1877 if e.errno != errno.ENOENT:
1884 if e.errno != errno.ENOENT:
1878 raise
1885 raise
1879
1886
1880 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1887 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1881 return 0
1888 return 0
1882
1889
1883 held += report(repo.svfs, b"lock", repo.lock)
1890 held += report(repo.svfs, b"lock", repo.lock)
1884 held += report(repo.vfs, b"wlock", repo.wlock)
1891 held += report(repo.vfs, b"wlock", repo.wlock)
1885
1892
1886 return held
1893 return held
1887
1894
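A standalone sketch (illustrative values) of the lock-owner parsing above: a lock file records its holder as "host:pid", and the host part is compared with the local hostname to decide how the locker is described.

locker = b'alice-laptop:12345'
host, pid = locker.split(b':')
assert (host, pid) == (b'alice-laptop', b'12345')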
1888
1895
1889 @command(
1896 @command(
1890 b'debugmanifestfulltextcache',
1897 b'debugmanifestfulltextcache',
1891 [
1898 [
1892 (b'', b'clear', False, _(b'clear the cache')),
1899 (b'', b'clear', False, _(b'clear the cache')),
1893 (
1900 (
1894 b'a',
1901 b'a',
1895 b'add',
1902 b'add',
1896 [],
1903 [],
1897 _(b'add the given manifest nodes to the cache'),
1904 _(b'add the given manifest nodes to the cache'),
1898 _(b'NODE'),
1905 _(b'NODE'),
1899 ),
1906 ),
1900 ],
1907 ],
1901 b'',
1908 b'',
1902 )
1909 )
1903 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1910 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1904 """show, clear or amend the contents of the manifest fulltext cache"""
1911 """show, clear or amend the contents of the manifest fulltext cache"""
1905
1912
1906 def getcache():
1913 def getcache():
1907 r = repo.manifestlog.getstorage(b'')
1914 r = repo.manifestlog.getstorage(b'')
1908 try:
1915 try:
1909 return r._fulltextcache
1916 return r._fulltextcache
1910 except AttributeError:
1917 except AttributeError:
1911 msg = _(
1918 msg = _(
1912 b"Current revlog implementation doesn't appear to have a "
1919 b"Current revlog implementation doesn't appear to have a "
1913 b"manifest fulltext cache\n"
1920 b"manifest fulltext cache\n"
1914 )
1921 )
1915 raise error.Abort(msg)
1922 raise error.Abort(msg)
1916
1923
1917 if opts.get('clear'):
1924 if opts.get('clear'):
1918 with repo.wlock():
1925 with repo.wlock():
1919 cache = getcache()
1926 cache = getcache()
1920 cache.clear(clear_persisted_data=True)
1927 cache.clear(clear_persisted_data=True)
1921 return
1928 return
1922
1929
1923 if add:
1930 if add:
1924 with repo.wlock():
1931 with repo.wlock():
1925 m = repo.manifestlog
1932 m = repo.manifestlog
1926 store = m.getstorage(b'')
1933 store = m.getstorage(b'')
1927 for n in add:
1934 for n in add:
1928 try:
1935 try:
1929 manifest = m[store.lookup(n)]
1936 manifest = m[store.lookup(n)]
1930 except error.LookupError as e:
1937 except error.LookupError as e:
1931 raise error.Abort(e, hint=b"Check your manifest node id")
1938 raise error.Abort(e, hint=b"Check your manifest node id")
1932 manifest.read() # stores revision in cache too
1939 manifest.read() # stores revision in cache too
1933 return
1940 return
1934
1941
1935 cache = getcache()
1942 cache = getcache()
1936 if not len(cache):
1943 if not len(cache):
1937 ui.write(_(b'cache empty\n'))
1944 ui.write(_(b'cache empty\n'))
1938 else:
1945 else:
1939 ui.write(
1946 ui.write(
1940 _(
1947 _(
1941 b'cache contains %d manifest entries, in order of most to '
1948 b'cache contains %d manifest entries, in order of most to '
1942 b'least recent:\n'
1949 b'least recent:\n'
1943 )
1950 )
1944 % (len(cache),)
1951 % (len(cache),)
1945 )
1952 )
1946 totalsize = 0
1953 totalsize = 0
1947 for nodeid in cache:
1954 for nodeid in cache:
1948 # Use cache.peek to not update the LRU order
1955 # Use cache.peek to not update the LRU order
1949 data = cache.peek(nodeid)
1956 data = cache.peek(nodeid)
1950 size = len(data)
1957 size = len(data)
1951 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1958 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1952 ui.write(
1959 ui.write(
1953 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1960 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1954 )
1961 )
1955 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1962 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1956 ui.write(
1963 ui.write(
1957 _(b'total cache data size %s, on-disk %s\n')
1964 _(b'total cache data size %s, on-disk %s\n')
1958 % (util.bytecount(totalsize), util.bytecount(ondisk))
1965 % (util.bytecount(totalsize), util.bytecount(ondisk))
1959 )
1966 )
1960
1967
1961
1968
1962 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
1969 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
1963 def debugmergestate(ui, repo, *args, **opts):
1970 def debugmergestate(ui, repo, *args, **opts):
1964 """print merge state
1971 """print merge state
1965
1972
1966 Use --verbose to print out information about whether v1 or v2 merge state
1973 Use --verbose to print out information about whether v1 or v2 merge state
1967 was chosen."""
1974 was chosen."""
1968
1975
1969 if ui.verbose:
1976 if ui.verbose:
1970 ms = mergemod.mergestate(repo)
1977 ms = mergemod.mergestate(repo)
1971
1978
1972 # sort so that reasonable information is on top
1979 # sort so that reasonable information is on top
1973 v1records = ms._readrecordsv1()
1980 v1records = ms._readrecordsv1()
1974 v2records = ms._readrecordsv2()
1981 v2records = ms._readrecordsv2()
1975
1982
1976 if not v1records and not v2records:
1983 if not v1records and not v2records:
1977 pass
1984 pass
1978 elif not v2records:
1985 elif not v2records:
1979 ui.writenoi18n(b'no version 2 merge state\n')
1986 ui.writenoi18n(b'no version 2 merge state\n')
1980 elif ms._v1v2match(v1records, v2records):
1987 elif ms._v1v2match(v1records, v2records):
1981 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
1988 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
1982 else:
1989 else:
1983 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
1990 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
1984
1991
1985 opts = pycompat.byteskwargs(opts)
1992 opts = pycompat.byteskwargs(opts)
1986 if not opts[b'template']:
1993 if not opts[b'template']:
1987 opts[b'template'] = (
1994 opts[b'template'] = (
1988 b'{if(commits, "", "no merge state found\n")}'
1995 b'{if(commits, "", "no merge state found\n")}'
1989 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
1996 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
1990 b'{files % "file: {path} (state \\"{state}\\")\n'
1997 b'{files % "file: {path} (state \\"{state}\\")\n'
1991 b'{if(local_path, "'
1998 b'{if(local_path, "'
1992 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
1999 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
1993 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2000 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
1994 b' other path: {other_path} (node {other_node})\n'
2001 b' other path: {other_path} (node {other_node})\n'
1995 b'")}'
2002 b'")}'
1996 b'{if(rename_side, "'
2003 b'{if(rename_side, "'
1997 b' rename side: {rename_side}\n'
2004 b' rename side: {rename_side}\n'
1998 b' renamed path: {renamed_path}\n'
2005 b' renamed path: {renamed_path}\n'
1999 b'")}'
2006 b'")}'
2000 b'{extras % " extra: {key} = {value}\n"}'
2007 b'{extras % " extra: {key} = {value}\n"}'
2001 b'"}'
2008 b'"}'
2002 )
2009 )
2003
2010
2004 ms = mergemod.mergestate.read(repo)
2011 ms = mergemod.mergestate.read(repo)
2005
2012
2006 fm = ui.formatter(b'debugmergestate', opts)
2013 fm = ui.formatter(b'debugmergestate', opts)
2007 fm.startitem()
2014 fm.startitem()
2008
2015
2009 fm_commits = fm.nested(b'commits')
2016 fm_commits = fm.nested(b'commits')
2010 if ms.active():
2017 if ms.active():
2011 for name, node, label_index in (
2018 for name, node, label_index in (
2012 (b'local', ms.local, 0),
2019 (b'local', ms.local, 0),
2013 (b'other', ms.other, 1),
2020 (b'other', ms.other, 1),
2014 ):
2021 ):
2015 fm_commits.startitem()
2022 fm_commits.startitem()
2016 fm_commits.data(name=name)
2023 fm_commits.data(name=name)
2017 fm_commits.data(node=hex(node))
2024 fm_commits.data(node=hex(node))
2018 if ms._labels and len(ms._labels) > label_index:
2025 if ms._labels and len(ms._labels) > label_index:
2019 fm_commits.data(label=ms._labels[label_index])
2026 fm_commits.data(label=ms._labels[label_index])
2020 fm_commits.end()
2027 fm_commits.end()
2021
2028
2022 fm_files = fm.nested(b'files')
2029 fm_files = fm.nested(b'files')
2023 if ms.active():
2030 if ms.active():
2024 for f in ms:
2031 for f in ms:
2025 fm_files.startitem()
2032 fm_files.startitem()
2026 fm_files.data(path=f)
2033 fm_files.data(path=f)
2027 state = ms._state[f]
2034 state = ms._state[f]
2028 fm_files.data(state=state[0])
2035 fm_files.data(state=state[0])
2029 if state[0] in (
2036 if state[0] in (
2030 mergemod.MERGE_RECORD_UNRESOLVED,
2037 mergemod.MERGE_RECORD_UNRESOLVED,
2031 mergemod.MERGE_RECORD_RESOLVED,
2038 mergemod.MERGE_RECORD_RESOLVED,
2032 ):
2039 ):
2033 fm_files.data(local_key=state[1])
2040 fm_files.data(local_key=state[1])
2034 fm_files.data(local_path=state[2])
2041 fm_files.data(local_path=state[2])
2035 fm_files.data(ancestor_path=state[3])
2042 fm_files.data(ancestor_path=state[3])
2036 fm_files.data(ancestor_node=state[4])
2043 fm_files.data(ancestor_node=state[4])
2037 fm_files.data(other_path=state[5])
2044 fm_files.data(other_path=state[5])
2038 fm_files.data(other_node=state[6])
2045 fm_files.data(other_node=state[6])
2039 fm_files.data(local_flags=state[7])
2046 fm_files.data(local_flags=state[7])
2040 elif state[0] in (
2047 elif state[0] in (
2041 mergemod.MERGE_RECORD_UNRESOLVED_PATH,
2048 mergemod.MERGE_RECORD_UNRESOLVED_PATH,
2042 mergemod.MERGE_RECORD_RESOLVED_PATH,
2049 mergemod.MERGE_RECORD_RESOLVED_PATH,
2043 ):
2050 ):
2044 fm_files.data(renamed_path=state[1])
2051 fm_files.data(renamed_path=state[1])
2045 fm_files.data(rename_side=state[2])
2052 fm_files.data(rename_side=state[2])
2046 fm_extras = fm_files.nested(b'extras')
2053 fm_extras = fm_files.nested(b'extras')
2047 for k, v in ms.extras(f).items():
2054 for k, v in ms.extras(f).items():
2048 fm_extras.startitem()
2055 fm_extras.startitem()
2049 fm_extras.data(key=k)
2056 fm_extras.data(key=k)
2050 fm_extras.data(value=v)
2057 fm_extras.data(value=v)
2051 fm_extras.end()
2058 fm_extras.end()
2052
2059
2053 fm_files.end()
2060 fm_files.end()
2054
2061
2055 fm.end()
2062 fm.end()
2056
2063
2057
2064
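# Hedged illustration (editor addition): with the default template defined
# above, output for a repository with an active merge might look roughly
# like the lines below; the file names, hashes and labels are hypothetical
# placeholders, not output captured from a real run.
#
#   $ hg debugmergestate
#   local (working copy): <40-hex local node>
#   other (merge rev): <40-hex other node>
#   file: foo.txt (state "u")
#     local path: foo.txt (hash <sha1>, flags "")
#     ancestor path: foo.txt (node <40-hex node>)
#     other path: foo.txt (node <40-hex node>)
#     extra: some_key = some_value
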
2058 @command(b'debugnamecomplete', [], _(b'NAME...'))
2065 @command(b'debugnamecomplete', [], _(b'NAME...'))
2059 def debugnamecomplete(ui, repo, *args):
2066 def debugnamecomplete(ui, repo, *args):
2060 '''complete "names" - tags, open branch names, bookmark names'''
2067 '''complete "names" - tags, open branch names, bookmark names'''
2061
2068
2062 names = set()
2069 names = set()
2063 # since we previously only listed open branches, we will handle that
2070 # since we previously only listed open branches, we will handle that
2064 # specially (after this for loop)
2071 # specially (after this for loop)
2065 for name, ns in pycompat.iteritems(repo.names):
2072 for name, ns in pycompat.iteritems(repo.names):
2066 if name != b'branches':
2073 if name != b'branches':
2067 names.update(ns.listnames(repo))
2074 names.update(ns.listnames(repo))
2068 names.update(
2075 names.update(
2069 tag
2076 tag
2070 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2077 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2071 if not closed
2078 if not closed
2072 )
2079 )
2073 completions = set()
2080 completions = set()
2074 if not args:
2081 if not args:
2075 args = [b'']
2082 args = [b'']
2076 for a in args:
2083 for a in args:
2077 completions.update(n for n in names if n.startswith(a))
2084 completions.update(n for n in names if n.startswith(a))
2078 ui.write(b'\n'.join(sorted(completions)))
2085 ui.write(b'\n'.join(sorted(completions)))
2079 ui.write(b'\n')
2086 ui.write(b'\n')
2080
2087
2081
2088
2082 @command(
2089 @command(
2083 b'debugnodemap',
2090 b'debugnodemap',
2084 [
2091 [
2085 (
2092 (
2086 b'',
2093 b'',
2087 b'dump-new',
2094 b'dump-new',
2088 False,
2095 False,
2089 _(b'write a (new) persistent binary nodemap on stdout'),
2096 _(b'write a (new) persistent binary nodemap on stdout'),
2090 ),
2097 ),
2091 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2098 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2092 (
2099 (
2093 b'',
2100 b'',
2094 b'check',
2101 b'check',
2095 False,
2102 False,
2096 _(b'check that the data on disk are correct.'),
2103 _(b'check that the data on disk are correct.'),
2097 ),
2104 ),
2098 (
2105 (
2099 b'',
2106 b'',
2100 b'metadata',
2107 b'metadata',
2101 False,
2108 False,
2102 _(b'display the on-disk metadata for the nodemap'),
2109 _(b'display the on-disk metadata for the nodemap'),
2103 ),
2110 ),
2104 ],
2111 ],
2105 )
2112 )
2106 def debugnodemap(ui, repo, **opts):
2113 def debugnodemap(ui, repo, **opts):
2107 """write and inspect on disk nodemap
2114 """write and inspect on disk nodemap
2108 """
2115 """
2109 if opts['dump_new']:
2116 if opts['dump_new']:
2110 unfi = repo.unfiltered()
2117 unfi = repo.unfiltered()
2111 cl = unfi.changelog
2118 cl = unfi.changelog
2112 if util.safehasattr(cl.index, "nodemap_data_all"):
2119 if util.safehasattr(cl.index, "nodemap_data_all"):
2113 data = cl.index.nodemap_data_all()
2120 data = cl.index.nodemap_data_all()
2114 else:
2121 else:
2115 data = nodemap.persistent_data(cl.index)
2122 data = nodemap.persistent_data(cl.index)
2116 ui.write(data)
2123 ui.write(data)
2117 elif opts['dump_disk']:
2124 elif opts['dump_disk']:
2118 unfi = repo.unfiltered()
2125 unfi = repo.unfiltered()
2119 cl = unfi.changelog
2126 cl = unfi.changelog
2120 nm_data = nodemap.persisted_data(cl)
2127 nm_data = nodemap.persisted_data(cl)
2121 if nm_data is not None:
2128 if nm_data is not None:
2122 docket, data = nm_data
2129 docket, data = nm_data
2123 ui.write(data[:])
2130 ui.write(data[:])
2124 elif opts['check']:
2131 elif opts['check']:
2125 unfi = repo.unfiltered()
2132 unfi = repo.unfiltered()
2126 cl = unfi.changelog
2133 cl = unfi.changelog
2127 nm_data = nodemap.persisted_data(cl)
2134 nm_data = nodemap.persisted_data(cl)
2128 if nm_data is not None:
2135 if nm_data is not None:
2129 docket, data = nm_data
2136 docket, data = nm_data
2130 return nodemap.check_data(ui, cl.index, data)
2137 return nodemap.check_data(ui, cl.index, data)
2131 elif opts['metadata']:
2138 elif opts['metadata']:
2132 unfi = repo.unfiltered()
2139 unfi = repo.unfiltered()
2133 cl = unfi.changelog
2140 cl = unfi.changelog
2134 nm_data = nodemap.persisted_data(cl)
2141 nm_data = nodemap.persisted_data(cl)
2135 if nm_data is not None:
2142 if nm_data is not None:
2136 docket, data = nm_data
2143 docket, data = nm_data
2137 ui.write((b"uid: %s\n") % docket.uid)
2144 ui.write((b"uid: %s\n") % docket.uid)
2138 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2145 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2139 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2146 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2140 ui.write((b"data-length: %d\n") % docket.data_length)
2147 ui.write((b"data-length: %d\n") % docket.data_length)
2141 ui.write((b"data-unused: %d\n") % docket.data_unused)
2148 ui.write((b"data-unused: %d\n") % docket.data_unused)
2142
2149
2143
2150
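# Hedged usage sketch (editor addition): the --metadata branch above prints
# one docket field per line; a run might look roughly like the following,
# with all values hypothetical:
#
#   $ hg debugnodemap --metadata
#   uid: 1234567890ab
#   tip-rev: 5004
#   tip-node: <40-hex node>
#   data-length: 121088
#   data-unused: 0
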
2144 @command(
2151 @command(
2145 b'debugobsolete',
2152 b'debugobsolete',
2146 [
2153 [
2147 (b'', b'flags', 0, _(b'marker flags')),
2154 (b'', b'flags', 0, _(b'marker flags')),
2148 (
2155 (
2149 b'',
2156 b'',
2150 b'record-parents',
2157 b'record-parents',
2151 False,
2158 False,
2152 _(b'record parent information for the precursor'),
2159 _(b'record parent information for the precursor'),
2153 ),
2160 ),
2154 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2161 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2155 (
2162 (
2156 b'',
2163 b'',
2157 b'exclusive',
2164 b'exclusive',
2158 False,
2165 False,
2159 _(b'restrict display to markers only relevant to REV'),
2166 _(b'restrict display to markers only relevant to REV'),
2160 ),
2167 ),
2161 (b'', b'index', False, _(b'display index of the marker')),
2168 (b'', b'index', False, _(b'display index of the marker')),
2162 (b'', b'delete', [], _(b'delete markers specified by indices')),
2169 (b'', b'delete', [], _(b'delete markers specified by indices')),
2163 ]
2170 ]
2164 + cmdutil.commitopts2
2171 + cmdutil.commitopts2
2165 + cmdutil.formatteropts,
2172 + cmdutil.formatteropts,
2166 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2173 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2167 )
2174 )
2168 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2175 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2169 """create arbitrary obsolete marker
2176 """create arbitrary obsolete marker
2170
2177
2171 With no arguments, displays the list of obsolescence markers."""
2178 With no arguments, displays the list of obsolescence markers."""
2172
2179
2173 opts = pycompat.byteskwargs(opts)
2180 opts = pycompat.byteskwargs(opts)
2174
2181
2175 def parsenodeid(s):
2182 def parsenodeid(s):
2176 try:
2183 try:
2177 # We do not use revsingle/revrange functions here to accept
2184 # We do not use revsingle/revrange functions here to accept
2178 # arbitrary node identifiers, possibly not present in the
2185 # arbitrary node identifiers, possibly not present in the
2179 # local repository.
2186 # local repository.
2180 n = bin(s)
2187 n = bin(s)
2181 if len(n) != len(nullid):
2188 if len(n) != len(nullid):
2182 raise TypeError()
2189 raise TypeError()
2183 return n
2190 return n
2184 except TypeError:
2191 except TypeError:
2185 raise error.Abort(
2192 raise error.Abort(
2186 b'changeset references must be full hexadecimal '
2193 b'changeset references must be full hexadecimal '
2187 b'node identifiers'
2194 b'node identifiers'
2188 )
2195 )
2189
2196
2190 if opts.get(b'delete'):
2197 if opts.get(b'delete'):
2191 indices = []
2198 indices = []
2192 for v in opts.get(b'delete'):
2199 for v in opts.get(b'delete'):
2193 try:
2200 try:
2194 indices.append(int(v))
2201 indices.append(int(v))
2195 except ValueError:
2202 except ValueError:
2196 raise error.Abort(
2203 raise error.Abort(
2197 _(b'invalid index value: %r') % v,
2204 _(b'invalid index value: %r') % v,
2198 hint=_(b'use integers for indices'),
2205 hint=_(b'use integers for indices'),
2199 )
2206 )
2200
2207
2201 if repo.currenttransaction():
2208 if repo.currenttransaction():
2202 raise error.Abort(
2209 raise error.Abort(
2203 _(b'cannot delete obsmarkers in the middle of a transaction.')
2210 _(b'cannot delete obsmarkers in the middle of a transaction.')
2204 )
2211 )
2205
2212
2206 with repo.lock():
2213 with repo.lock():
2207 n = repair.deleteobsmarkers(repo.obsstore, indices)
2214 n = repair.deleteobsmarkers(repo.obsstore, indices)
2208 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2215 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2209
2216
2210 return
2217 return
2211
2218
2212 if precursor is not None:
2219 if precursor is not None:
2213 if opts[b'rev']:
2220 if opts[b'rev']:
2214 raise error.Abort(b'cannot select revision when creating marker')
2221 raise error.Abort(b'cannot select revision when creating marker')
2215 metadata = {}
2222 metadata = {}
2216 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2223 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2217 succs = tuple(parsenodeid(succ) for succ in successors)
2224 succs = tuple(parsenodeid(succ) for succ in successors)
2218 l = repo.lock()
2225 l = repo.lock()
2219 try:
2226 try:
2220 tr = repo.transaction(b'debugobsolete')
2227 tr = repo.transaction(b'debugobsolete')
2221 try:
2228 try:
2222 date = opts.get(b'date')
2229 date = opts.get(b'date')
2223 if date:
2230 if date:
2224 date = dateutil.parsedate(date)
2231 date = dateutil.parsedate(date)
2225 else:
2232 else:
2226 date = None
2233 date = None
2227 prec = parsenodeid(precursor)
2234 prec = parsenodeid(precursor)
2228 parents = None
2235 parents = None
2229 if opts[b'record_parents']:
2236 if opts[b'record_parents']:
2230 if prec not in repo.unfiltered():
2237 if prec not in repo.unfiltered():
2231 raise error.Abort(
2238 raise error.Abort(
2232 b'cannot use --record-parents on '
2239 b'cannot use --record-parents on '
2233 b'unknown changesets'
2240 b'unknown changesets'
2234 )
2241 )
2235 parents = repo.unfiltered()[prec].parents()
2242 parents = repo.unfiltered()[prec].parents()
2236 parents = tuple(p.node() for p in parents)
2243 parents = tuple(p.node() for p in parents)
2237 repo.obsstore.create(
2244 repo.obsstore.create(
2238 tr,
2245 tr,
2239 prec,
2246 prec,
2240 succs,
2247 succs,
2241 opts[b'flags'],
2248 opts[b'flags'],
2242 parents=parents,
2249 parents=parents,
2243 date=date,
2250 date=date,
2244 metadata=metadata,
2251 metadata=metadata,
2245 ui=ui,
2252 ui=ui,
2246 )
2253 )
2247 tr.close()
2254 tr.close()
2248 except ValueError as exc:
2255 except ValueError as exc:
2249 raise error.Abort(
2256 raise error.Abort(
2250 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2257 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2251 )
2258 )
2252 finally:
2259 finally:
2253 tr.release()
2260 tr.release()
2254 finally:
2261 finally:
2255 l.release()
2262 l.release()
2256 else:
2263 else:
2257 if opts[b'rev']:
2264 if opts[b'rev']:
2258 revs = scmutil.revrange(repo, opts[b'rev'])
2265 revs = scmutil.revrange(repo, opts[b'rev'])
2259 nodes = [repo[r].node() for r in revs]
2266 nodes = [repo[r].node() for r in revs]
2260 markers = list(
2267 markers = list(
2261 obsutil.getmarkers(
2268 obsutil.getmarkers(
2262 repo, nodes=nodes, exclusive=opts[b'exclusive']
2269 repo, nodes=nodes, exclusive=opts[b'exclusive']
2263 )
2270 )
2264 )
2271 )
2265 markers.sort(key=lambda x: x._data)
2272 markers.sort(key=lambda x: x._data)
2266 else:
2273 else:
2267 markers = obsutil.getmarkers(repo)
2274 markers = obsutil.getmarkers(repo)
2268
2275
2269 markerstoiter = markers
2276 markerstoiter = markers
2270 isrelevant = lambda m: True
2277 isrelevant = lambda m: True
2271 if opts.get(b'rev') and opts.get(b'index'):
2278 if opts.get(b'rev') and opts.get(b'index'):
2272 markerstoiter = obsutil.getmarkers(repo)
2279 markerstoiter = obsutil.getmarkers(repo)
2273 markerset = set(markers)
2280 markerset = set(markers)
2274 isrelevant = lambda m: m in markerset
2281 isrelevant = lambda m: m in markerset
2275
2282
2276 fm = ui.formatter(b'debugobsolete', opts)
2283 fm = ui.formatter(b'debugobsolete', opts)
2277 for i, m in enumerate(markerstoiter):
2284 for i, m in enumerate(markerstoiter):
2278 if not isrelevant(m):
2285 if not isrelevant(m):
2279 # marker can be irrelevant when we're iterating over a set
2286 # marker can be irrelevant when we're iterating over a set
2280 # of markers (markerstoiter) which is bigger than the set
2287 # of markers (markerstoiter) which is bigger than the set
2281 # of markers we want to display (markers);
2288 # of markers we want to display (markers);
2282 # this can happen if both --index and --rev options are
2289 # this can happen if both --index and --rev options are
2283 # provided and thus we need to iterate over all of the markers
2290 # provided and thus we need to iterate over all of the markers
2284 # to get the correct indices, but only display the ones that
2291 # to get the correct indices, but only display the ones that
2285 # are relevant to --rev value
2292 # are relevant to --rev value
2286 continue
2293 continue
2287 fm.startitem()
2294 fm.startitem()
2288 ind = i if opts.get(b'index') else None
2295 ind = i if opts.get(b'index') else None
2289 cmdutil.showmarker(fm, m, index=ind)
2296 cmdutil.showmarker(fm, m, index=ind)
2290 fm.end()
2297 fm.end()
2291
2298
2292
2299
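# Hedged usage sketch (editor addition): creating a marker requires full
# 40-character hexadecimal node ids, as enforced by parsenodeid() above;
# with no positional arguments the command falls through to the listing
# branch and prints every marker via cmdutil.showmarker. For example
# (node ids are hypothetical placeholders):
#
#   $ hg debugobsolete <40-hex precursor> <40-hex successor> -d '0 0'
#   $ hg debugobsolete          # lists all markers, one per line
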
2293 @command(
2300 @command(
2294 b'debugp1copies',
2301 b'debugp1copies',
2295 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2302 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2296 _(b'[-r REV]'),
2303 _(b'[-r REV]'),
2297 )
2304 )
2298 def debugp1copies(ui, repo, **opts):
2305 def debugp1copies(ui, repo, **opts):
2299 """dump copy information compared to p1"""
2306 """dump copy information compared to p1"""
2300
2307
2301 opts = pycompat.byteskwargs(opts)
2308 opts = pycompat.byteskwargs(opts)
2302 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2309 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2303 for dst, src in ctx.p1copies().items():
2310 for dst, src in ctx.p1copies().items():
2304 ui.write(b'%s -> %s\n' % (src, dst))
2311 ui.write(b'%s -> %s\n' % (src, dst))
2305
2312
2306
2313
2307 @command(
2314 @command(
2308 b'debugp2copies',
2315 b'debugp2copies',
2309 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2316 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2310 _(b'[-r REV]'),
2317 _(b'[-r REV]'),
2311 )
2318 )
2312 def debugp2copies(ui, repo, **opts):
2319 def debugp2copies(ui, repo, **opts):
2313 """dump copy information compared to p2"""
2320 """dump copy information compared to p2"""
2314
2321
2315 opts = pycompat.byteskwargs(opts)
2322 opts = pycompat.byteskwargs(opts)
2316 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2323 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2317 for dst, src in ctx.p2copies().items():
2324 for dst, src in ctx.p2copies().items():
2318 ui.write(b'%s -> %s\n' % (src, dst))
2325 ui.write(b'%s -> %s\n' % (src, dst))
2319
2326
2320
2327
2321 @command(
2328 @command(
2322 b'debugpathcomplete',
2329 b'debugpathcomplete',
2323 [
2330 [
2324 (b'f', b'full', None, _(b'complete an entire path')),
2331 (b'f', b'full', None, _(b'complete an entire path')),
2325 (b'n', b'normal', None, _(b'show only normal files')),
2332 (b'n', b'normal', None, _(b'show only normal files')),
2326 (b'a', b'added', None, _(b'show only added files')),
2333 (b'a', b'added', None, _(b'show only added files')),
2327 (b'r', b'removed', None, _(b'show only removed files')),
2334 (b'r', b'removed', None, _(b'show only removed files')),
2328 ],
2335 ],
2329 _(b'FILESPEC...'),
2336 _(b'FILESPEC...'),
2330 )
2337 )
2331 def debugpathcomplete(ui, repo, *specs, **opts):
2338 def debugpathcomplete(ui, repo, *specs, **opts):
2332 '''complete part or all of a tracked path
2339 '''complete part or all of a tracked path
2333
2340
2334 This command supports shells that offer path name completion. It
2341 This command supports shells that offer path name completion. It
2335 currently completes only files already known to the dirstate.
2342 currently completes only files already known to the dirstate.
2336
2343
2337 Completion extends only to the next path segment unless
2344 Completion extends only to the next path segment unless
2338 --full is specified, in which case entire paths are used.'''
2345 --full is specified, in which case entire paths are used.'''
2339
2346
2340 def complete(path, acceptable):
2347 def complete(path, acceptable):
2341 dirstate = repo.dirstate
2348 dirstate = repo.dirstate
2342 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2349 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2343 rootdir = repo.root + pycompat.ossep
2350 rootdir = repo.root + pycompat.ossep
2344 if spec != repo.root and not spec.startswith(rootdir):
2351 if spec != repo.root and not spec.startswith(rootdir):
2345 return [], []
2352 return [], []
2346 if os.path.isdir(spec):
2353 if os.path.isdir(spec):
2347 spec += b'/'
2354 spec += b'/'
2348 spec = spec[len(rootdir) :]
2355 spec = spec[len(rootdir) :]
2349 fixpaths = pycompat.ossep != b'/'
2356 fixpaths = pycompat.ossep != b'/'
2350 if fixpaths:
2357 if fixpaths:
2351 spec = spec.replace(pycompat.ossep, b'/')
2358 spec = spec.replace(pycompat.ossep, b'/')
2352 speclen = len(spec)
2359 speclen = len(spec)
2353 fullpaths = opts['full']
2360 fullpaths = opts['full']
2354 files, dirs = set(), set()
2361 files, dirs = set(), set()
2355 adddir, addfile = dirs.add, files.add
2362 adddir, addfile = dirs.add, files.add
2356 for f, st in pycompat.iteritems(dirstate):
2363 for f, st in pycompat.iteritems(dirstate):
2357 if f.startswith(spec) and st[0] in acceptable:
2364 if f.startswith(spec) and st[0] in acceptable:
2358 if fixpaths:
2365 if fixpaths:
2359 f = f.replace(b'/', pycompat.ossep)
2366 f = f.replace(b'/', pycompat.ossep)
2360 if fullpaths:
2367 if fullpaths:
2361 addfile(f)
2368 addfile(f)
2362 continue
2369 continue
2363 s = f.find(pycompat.ossep, speclen)
2370 s = f.find(pycompat.ossep, speclen)
2364 if s >= 0:
2371 if s >= 0:
2365 adddir(f[:s])
2372 adddir(f[:s])
2366 else:
2373 else:
2367 addfile(f)
2374 addfile(f)
2368 return files, dirs
2375 return files, dirs
2369
2376
2370 acceptable = b''
2377 acceptable = b''
2371 if opts['normal']:
2378 if opts['normal']:
2372 acceptable += b'nm'
2379 acceptable += b'nm'
2373 if opts['added']:
2380 if opts['added']:
2374 acceptable += b'a'
2381 acceptable += b'a'
2375 if opts['removed']:
2382 if opts['removed']:
2376 acceptable += b'r'
2383 acceptable += b'r'
2377 cwd = repo.getcwd()
2384 cwd = repo.getcwd()
2378 if not specs:
2385 if not specs:
2379 specs = [b'.']
2386 specs = [b'.']
2380
2387
2381 files, dirs = set(), set()
2388 files, dirs = set(), set()
2382 for spec in specs:
2389 for spec in specs:
2383 f, d = complete(spec, acceptable or b'nmar')
2390 f, d = complete(spec, acceptable or b'nmar')
2384 files.update(f)
2391 files.update(f)
2385 dirs.update(d)
2392 dirs.update(d)
2386 files.update(dirs)
2393 files.update(dirs)
2387 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2394 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2388 ui.write(b'\n')
2395 ui.write(b'\n')
2389
2396
2390
2397
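# Hedged example (editor addition): completion is driven purely by the
# dirstate, so assuming tracked files src/main.py and src/util.py, a shell
# integration could run (paths are hypothetical):
#
#   $ hg debugpathcomplete src/
#   src/main.py
#   src/util.py
#
# Without --full, completion stops at the next path segment, so
# "hg debugpathcomplete s" would print just "src".
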
2391 @command(
2398 @command(
2392 b'debugpathcopies',
2399 b'debugpathcopies',
2393 cmdutil.walkopts,
2400 cmdutil.walkopts,
2394 b'hg debugpathcopies REV1 REV2 [FILE]',
2401 b'hg debugpathcopies REV1 REV2 [FILE]',
2395 inferrepo=True,
2402 inferrepo=True,
2396 )
2403 )
2397 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2404 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2398 """show copies between two revisions"""
2405 """show copies between two revisions"""
2399 ctx1 = scmutil.revsingle(repo, rev1)
2406 ctx1 = scmutil.revsingle(repo, rev1)
2400 ctx2 = scmutil.revsingle(repo, rev2)
2407 ctx2 = scmutil.revsingle(repo, rev2)
2401 m = scmutil.match(ctx1, pats, opts)
2408 m = scmutil.match(ctx1, pats, opts)
2402 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2409 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2403 ui.write(b'%s -> %s\n' % (src, dst))
2410 ui.write(b'%s -> %s\n' % (src, dst))
2404
2411
2405
2412
2406 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2413 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2407 def debugpeer(ui, path):
2414 def debugpeer(ui, path):
2408 """establish a connection to a peer repository"""
2415 """establish a connection to a peer repository"""
2409 # Always enable peer request logging. Requires --debug to display
2416 # Always enable peer request logging. Requires --debug to display
2410 # though.
2417 # though.
2411 overrides = {
2418 overrides = {
2412 (b'devel', b'debug.peer-request'): True,
2419 (b'devel', b'debug.peer-request'): True,
2413 }
2420 }
2414
2421
2415 with ui.configoverride(overrides):
2422 with ui.configoverride(overrides):
2416 peer = hg.peer(ui, {}, path)
2423 peer = hg.peer(ui, {}, path)
2417
2424
2418 local = peer.local() is not None
2425 local = peer.local() is not None
2419 canpush = peer.canpush()
2426 canpush = peer.canpush()
2420
2427
2421 ui.write(_(b'url: %s\n') % peer.url())
2428 ui.write(_(b'url: %s\n') % peer.url())
2422 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2429 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2423 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2430 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2424
2431
2425
2432
2426 @command(
2433 @command(
2427 b'debugpickmergetool',
2434 b'debugpickmergetool',
2428 [
2435 [
2429 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2436 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2430 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2437 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2431 ]
2438 ]
2432 + cmdutil.walkopts
2439 + cmdutil.walkopts
2433 + cmdutil.mergetoolopts,
2440 + cmdutil.mergetoolopts,
2434 _(b'[PATTERN]...'),
2441 _(b'[PATTERN]...'),
2435 inferrepo=True,
2442 inferrepo=True,
2436 )
2443 )
2437 def debugpickmergetool(ui, repo, *pats, **opts):
2444 def debugpickmergetool(ui, repo, *pats, **opts):
2438 """examine which merge tool is chosen for specified file
2445 """examine which merge tool is chosen for specified file
2439
2446
2440 As described in :hg:`help merge-tools`, Mercurial examines
2447 As described in :hg:`help merge-tools`, Mercurial examines
2441 the configurations below in this order to decide which merge tool is
2448 the configurations below in this order to decide which merge tool is
2442 chosen for the specified file.
2449 chosen for the specified file.
2443
2450
2444 1. ``--tool`` option
2451 1. ``--tool`` option
2445 2. ``HGMERGE`` environment variable
2452 2. ``HGMERGE`` environment variable
2446 3. configurations in ``merge-patterns`` section
2453 3. configurations in ``merge-patterns`` section
2447 4. configuration of ``ui.merge``
2454 4. configuration of ``ui.merge``
2448 5. configurations in ``merge-tools`` section
2455 5. configurations in ``merge-tools`` section
2449 6. ``hgmerge`` tool (for historical reasons only)
2456 6. ``hgmerge`` tool (for historical reasons only)
2450 7. default tool for fallback (``:merge`` or ``:prompt``)
2457 7. default tool for fallback (``:merge`` or ``:prompt``)
2451
2458
2452 This command writes out the examination result in the style below::
2459 This command writes out the examination result in the style below::
2453
2460
2454 FILE = MERGETOOL
2461 FILE = MERGETOOL
2455
2462
2456 By default, all files known in the first parent context of the
2463 By default, all files known in the first parent context of the
2457 working directory are examined. Use file patterns and/or -I/-X
2464 working directory are examined. Use file patterns and/or -I/-X
2458 options to limit target files. -r/--rev is also useful to examine
2465 options to limit target files. -r/--rev is also useful to examine
2459 files in another context without actually updating to it.
2466 files in another context without actually updating to it.
2460
2467
2461 With --debug, this command shows warning messages while matching
2468 With --debug, this command shows warning messages while matching
2462 against ``merge-patterns`` and so on, too. It is recommended to
2469 against ``merge-patterns`` and so on, too. It is recommended to
2463 use this option with explicit file patterns and/or -I/-X options,
2470 use this option with explicit file patterns and/or -I/-X options,
2464 because this option increases amount of output per file according
2471 because this option increases amount of output per file according
2465 to configurations in hgrc.
2472 to configurations in hgrc.
2466
2473
2467 With -v/--verbose, this command first shows the configurations
2474 With -v/--verbose, this command first shows the configurations
2468 below (only if specified).
2475 below (only if specified).
2469
2476
2470 - ``--tool`` option
2477 - ``--tool`` option
2471 - ``HGMERGE`` environment variable
2478 - ``HGMERGE`` environment variable
2472 - configuration of ``ui.merge``
2479 - configuration of ``ui.merge``
2473
2480
2474 If a merge tool is chosen before matching against
2481 If a merge tool is chosen before matching against
2475 ``merge-patterns``, this command can't show any helpful
2482 ``merge-patterns``, this command can't show any helpful
2476 information, even with --debug. In such a case, the information above
2483 information, even with --debug. In such a case, the information above
2477 is useful for knowing why a merge tool was chosen.
2484 is useful for knowing why a merge tool was chosen.
2478 """
2485 """
2479 opts = pycompat.byteskwargs(opts)
2486 opts = pycompat.byteskwargs(opts)
2480 overrides = {}
2487 overrides = {}
2481 if opts[b'tool']:
2488 if opts[b'tool']:
2482 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2489 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2483 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2490 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2484
2491
2485 with ui.configoverride(overrides, b'debugmergepatterns'):
2492 with ui.configoverride(overrides, b'debugmergepatterns'):
2486 hgmerge = encoding.environ.get(b"HGMERGE")
2493 hgmerge = encoding.environ.get(b"HGMERGE")
2487 if hgmerge is not None:
2494 if hgmerge is not None:
2488 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2495 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2489 uimerge = ui.config(b"ui", b"merge")
2496 uimerge = ui.config(b"ui", b"merge")
2490 if uimerge:
2497 if uimerge:
2491 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2498 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2492
2499
2493 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2500 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2494 m = scmutil.match(ctx, pats, opts)
2501 m = scmutil.match(ctx, pats, opts)
2495 changedelete = opts[b'changedelete']
2502 changedelete = opts[b'changedelete']
2496 for path in ctx.walk(m):
2503 for path in ctx.walk(m):
2497 fctx = ctx[path]
2504 fctx = ctx[path]
2498 try:
2505 try:
2499 if not ui.debugflag:
2506 if not ui.debugflag:
2500 ui.pushbuffer(error=True)
2507 ui.pushbuffer(error=True)
2501 tool, toolpath = filemerge._picktool(
2508 tool, toolpath = filemerge._picktool(
2502 repo,
2509 repo,
2503 ui,
2510 ui,
2504 path,
2511 path,
2505 fctx.isbinary(),
2512 fctx.isbinary(),
2506 b'l' in fctx.flags(),
2513 b'l' in fctx.flags(),
2507 changedelete,
2514 changedelete,
2508 )
2515 )
2509 finally:
2516 finally:
2510 if not ui.debugflag:
2517 if not ui.debugflag:
2511 ui.popbuffer()
2518 ui.popbuffer()
2512 ui.write(b'%s = %s\n' % (path, tool))
2519 ui.write(b'%s = %s\n' % (path, tool))
2513
2520
2514
2521
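# Hedged example (editor addition): given a hypothetical configuration such
# as the following in an hgrc,
#
#   [merge-patterns]
#   **.c = kdiff3
#
# a run over two files reports one "FILE = MERGETOOL" line each; assuming no
# other tool configuration applies, that might look like:
#
#   $ hg debugpickmergetool foo.c README
#   foo.c = kdiff3
#   README = :merge
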
2515 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2522 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2516 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2523 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2517 '''access the pushkey key/value protocol
2524 '''access the pushkey key/value protocol
2518
2525
2519 With two args, list the keys in the given namespace.
2526 With two args, list the keys in the given namespace.
2520
2527
2521 With five args, set a key to new if it currently is set to old.
2528 With five args, set a key to new if it currently is set to old.
2522 Reports success or failure.
2529 Reports success or failure.
2523 '''
2530 '''
2524
2531
2525 target = hg.peer(ui, {}, repopath)
2532 target = hg.peer(ui, {}, repopath)
2526 if keyinfo:
2533 if keyinfo:
2527 key, old, new = keyinfo
2534 key, old, new = keyinfo
2528 with target.commandexecutor() as e:
2535 with target.commandexecutor() as e:
2529 r = e.callcommand(
2536 r = e.callcommand(
2530 b'pushkey',
2537 b'pushkey',
2531 {
2538 {
2532 b'namespace': namespace,
2539 b'namespace': namespace,
2533 b'key': key,
2540 b'key': key,
2534 b'old': old,
2541 b'old': old,
2535 b'new': new,
2542 b'new': new,
2536 },
2543 },
2537 ).result()
2544 ).result()
2538
2545
2539 ui.status(pycompat.bytestr(r) + b'\n')
2546 ui.status(pycompat.bytestr(r) + b'\n')
2540 return not r
2547 return not r
2541 else:
2548 else:
2542 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2549 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2543 ui.write(
2550 ui.write(
2544 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2551 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2545 )
2552 )
2546
2553
2547
2554
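# Hedged usage sketch (editor addition): the two-argument form lists a
# namespace, while the five-argument form attempts a conditional update and
# prints the result reported by the peer. With a hypothetical repository
# path and bookmark name:
#
#   $ hg debugpushkey /path/to/repo bookmarks
#   mybook  <40-hex node>          (key and value are tab-separated)
#   $ hg debugpushkey /path/to/repo bookmarks mybook <old 40-hex> <new 40-hex>
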
2548 @command(b'debugpvec', [], _(b'A B'))
2555 @command(b'debugpvec', [], _(b'A B'))
2549 def debugpvec(ui, repo, a, b=None):
2556 def debugpvec(ui, repo, a, b=None):
2550 ca = scmutil.revsingle(repo, a)
2557 ca = scmutil.revsingle(repo, a)
2551 cb = scmutil.revsingle(repo, b)
2558 cb = scmutil.revsingle(repo, b)
2552 pa = pvec.ctxpvec(ca)
2559 pa = pvec.ctxpvec(ca)
2553 pb = pvec.ctxpvec(cb)
2560 pb = pvec.ctxpvec(cb)
2554 if pa == pb:
2561 if pa == pb:
2555 rel = b"="
2562 rel = b"="
2556 elif pa > pb:
2563 elif pa > pb:
2557 rel = b">"
2564 rel = b">"
2558 elif pa < pb:
2565 elif pa < pb:
2559 rel = b"<"
2566 rel = b"<"
2560 elif pa | pb:
2567 elif pa | pb:
2561 rel = b"|"
2568 rel = b"|"
2562 ui.write(_(b"a: %s\n") % pa)
2569 ui.write(_(b"a: %s\n") % pa)
2563 ui.write(_(b"b: %s\n") % pb)
2570 ui.write(_(b"b: %s\n") % pb)
2564 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2571 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2565 ui.write(
2572 ui.write(
2566 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2573 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2567 % (
2574 % (
2568 abs(pa._depth - pb._depth),
2575 abs(pa._depth - pb._depth),
2569 pvec._hamming(pa._vec, pb._vec),
2576 pvec._hamming(pa._vec, pb._vec),
2570 pa.distance(pb),
2577 pa.distance(pb),
2571 rel,
2578 rel,
2572 )
2579 )
2573 )
2580 )
2574
2581
2575
2582
2576 @command(
2583 @command(
2577 b'debugrebuilddirstate|debugrebuildstate',
2584 b'debugrebuilddirstate|debugrebuildstate',
2578 [
2585 [
2579 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2586 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2580 (
2587 (
2581 b'',
2588 b'',
2582 b'minimal',
2589 b'minimal',
2583 None,
2590 None,
2584 _(
2591 _(
2585 b'only rebuild files that are inconsistent with '
2592 b'only rebuild files that are inconsistent with '
2586 b'the working copy parent'
2593 b'the working copy parent'
2587 ),
2594 ),
2588 ),
2595 ),
2589 ],
2596 ],
2590 _(b'[-r REV]'),
2597 _(b'[-r REV]'),
2591 )
2598 )
2592 def debugrebuilddirstate(ui, repo, rev, **opts):
2599 def debugrebuilddirstate(ui, repo, rev, **opts):
2593 """rebuild the dirstate as it would look like for the given revision
2600 """rebuild the dirstate as it would look like for the given revision
2594
2601
2595 If no revision is specified, the first current parent will be used.
2602 If no revision is specified, the first current parent will be used.
2596
2603
2597 The dirstate will be set to the files of the given revision.
2604 The dirstate will be set to the files of the given revision.
2598 The actual working directory content or existing dirstate
2605 The actual working directory content or existing dirstate
2599 information such as adds or removes is not considered.
2606 information such as adds or removes is not considered.
2600
2607
2601 ``minimal`` will only rebuild the dirstate status for files that claim to be
2608 ``minimal`` will only rebuild the dirstate status for files that claim to be
2602 tracked but are not in the parent manifest, or that exist in the parent
2609 tracked but are not in the parent manifest, or that exist in the parent
2603 manifest but are not in the dirstate. It will not change adds, removes, or
2610 manifest but are not in the dirstate. It will not change adds, removes, or
2604 modified files that are in the working copy parent.
2611 modified files that are in the working copy parent.
2605
2612
2606 One use of this command is to make the next :hg:`status` invocation
2613 One use of this command is to make the next :hg:`status` invocation
2607 check the actual file content.
2614 check the actual file content.
2608 """
2615 """
2609 ctx = scmutil.revsingle(repo, rev)
2616 ctx = scmutil.revsingle(repo, rev)
2610 with repo.wlock():
2617 with repo.wlock():
2611 dirstate = repo.dirstate
2618 dirstate = repo.dirstate
2612 changedfiles = None
2619 changedfiles = None
2613 # See command doc for what minimal does.
2620 # See command doc for what minimal does.
2614 if opts.get('minimal'):
2621 if opts.get('minimal'):
2615 manifestfiles = set(ctx.manifest().keys())
2622 manifestfiles = set(ctx.manifest().keys())
2616 dirstatefiles = set(dirstate)
2623 dirstatefiles = set(dirstate)
2617 manifestonly = manifestfiles - dirstatefiles
2624 manifestonly = manifestfiles - dirstatefiles
2618 dsonly = dirstatefiles - manifestfiles
2625 dsonly = dirstatefiles - manifestfiles
2619 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2626 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2620 changedfiles = manifestonly | dsnotadded
2627 changedfiles = manifestonly | dsnotadded
2621
2628
2622 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2629 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2623
2630
2624
2631
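# Worked example for --minimal (editor addition, hypothetical file names):
# if the revision's manifest tracks {a, b} and the dirstate tracks {b, c}
# with c marked as added ('a' state), then manifestonly == {a}, dsonly ==
# {c}, dsnotadded == {} (added files are left alone), and changedfiles ==
# {a}, so only "a" is rebuilt.
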
2625 @command(b'debugrebuildfncache', [], b'')
2632 @command(b'debugrebuildfncache', [], b'')
2626 def debugrebuildfncache(ui, repo):
2633 def debugrebuildfncache(ui, repo):
2627 """rebuild the fncache file"""
2634 """rebuild the fncache file"""
2628 repair.rebuildfncache(ui, repo)
2635 repair.rebuildfncache(ui, repo)
2629
2636
2630
2637
2631 @command(
2638 @command(
2632 b'debugrename',
2639 b'debugrename',
2633 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2640 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2634 _(b'[-r REV] [FILE]...'),
2641 _(b'[-r REV] [FILE]...'),
2635 )
2642 )
2636 def debugrename(ui, repo, *pats, **opts):
2643 def debugrename(ui, repo, *pats, **opts):
2637 """dump rename information"""
2644 """dump rename information"""
2638
2645
2639 opts = pycompat.byteskwargs(opts)
2646 opts = pycompat.byteskwargs(opts)
2640 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2647 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2641 m = scmutil.match(ctx, pats, opts)
2648 m = scmutil.match(ctx, pats, opts)
2642 for abs in ctx.walk(m):
2649 for abs in ctx.walk(m):
2643 fctx = ctx[abs]
2650 fctx = ctx[abs]
2644 o = fctx.filelog().renamed(fctx.filenode())
2651 o = fctx.filelog().renamed(fctx.filenode())
2645 rel = repo.pathto(abs)
2652 rel = repo.pathto(abs)
2646 if o:
2653 if o:
2647 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2654 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2648 else:
2655 else:
2649 ui.write(_(b"%s not renamed\n") % rel)
2656 ui.write(_(b"%s not renamed\n") % rel)
2650
2657
2651
2658
2652 @command(
2659 @command(
2653 b'debugrevlog',
2660 b'debugrevlog',
2654 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2661 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2655 _(b'-c|-m|FILE'),
2662 _(b'-c|-m|FILE'),
2656 optionalrepo=True,
2663 optionalrepo=True,
2657 )
2664 )
2658 def debugrevlog(ui, repo, file_=None, **opts):
2665 def debugrevlog(ui, repo, file_=None, **opts):
2659 """show data and statistics about a revlog"""
2666 """show data and statistics about a revlog"""
2660 opts = pycompat.byteskwargs(opts)
2667 opts = pycompat.byteskwargs(opts)
2661 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2668 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2662
2669
2663 if opts.get(b"dump"):
2670 if opts.get(b"dump"):
2664 numrevs = len(r)
2671 numrevs = len(r)
2665 ui.write(
2672 ui.write(
2666 (
2673 (
2667 b"# rev p1rev p2rev start end deltastart base p1 p2"
2674 b"# rev p1rev p2rev start end deltastart base p1 p2"
2668 b" rawsize totalsize compression heads chainlen\n"
2675 b" rawsize totalsize compression heads chainlen\n"
2669 )
2676 )
2670 )
2677 )
2671 ts = 0
2678 ts = 0
2672 heads = set()
2679 heads = set()
2673
2680
2674 for rev in pycompat.xrange(numrevs):
2681 for rev in pycompat.xrange(numrevs):
2675 dbase = r.deltaparent(rev)
2682 dbase = r.deltaparent(rev)
2676 if dbase == -1:
2683 if dbase == -1:
2677 dbase = rev
2684 dbase = rev
2678 cbase = r.chainbase(rev)
2685 cbase = r.chainbase(rev)
2679 clen = r.chainlen(rev)
2686 clen = r.chainlen(rev)
2680 p1, p2 = r.parentrevs(rev)
2687 p1, p2 = r.parentrevs(rev)
2681 rs = r.rawsize(rev)
2688 rs = r.rawsize(rev)
2682 ts = ts + rs
2689 ts = ts + rs
2683 heads -= set(r.parentrevs(rev))
2690 heads -= set(r.parentrevs(rev))
2684 heads.add(rev)
2691 heads.add(rev)
2685 try:
2692 try:
2686 compression = ts / r.end(rev)
2693 compression = ts / r.end(rev)
2687 except ZeroDivisionError:
2694 except ZeroDivisionError:
2688 compression = 0
2695 compression = 0
2689 ui.write(
2696 ui.write(
2690 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2697 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2691 b"%11d %5d %8d\n"
2698 b"%11d %5d %8d\n"
2692 % (
2699 % (
2693 rev,
2700 rev,
2694 p1,
2701 p1,
2695 p2,
2702 p2,
2696 r.start(rev),
2703 r.start(rev),
2697 r.end(rev),
2704 r.end(rev),
2698 r.start(dbase),
2705 r.start(dbase),
2699 r.start(cbase),
2706 r.start(cbase),
2700 r.start(p1),
2707 r.start(p1),
2701 r.start(p2),
2708 r.start(p2),
2702 rs,
2709 rs,
2703 ts,
2710 ts,
2704 compression,
2711 compression,
2705 len(heads),
2712 len(heads),
2706 clen,
2713 clen,
2707 )
2714 )
2708 )
2715 )
2709 return 0
2716 return 0
2710
2717
2711 v = r.version
2718 v = r.version
2712 format = v & 0xFFFF
2719 format = v & 0xFFFF
2713 flags = []
2720 flags = []
2714 gdelta = False
2721 gdelta = False
2715 if v & revlog.FLAG_INLINE_DATA:
2722 if v & revlog.FLAG_INLINE_DATA:
2716 flags.append(b'inline')
2723 flags.append(b'inline')
2717 if v & revlog.FLAG_GENERALDELTA:
2724 if v & revlog.FLAG_GENERALDELTA:
2718 gdelta = True
2725 gdelta = True
2719 flags.append(b'generaldelta')
2726 flags.append(b'generaldelta')
2720 if not flags:
2727 if not flags:
2721 flags = [b'(none)']
2728 flags = [b'(none)']
2722
2729
2723 ### tracks merge vs single parent
2730 ### tracks merge vs single parent
2724 nummerges = 0
2731 nummerges = 0
2725
2732
2726 ### tracks the ways the "delta" are built
2733 ### tracks the ways the "delta" are built
2727 # nodelta
2734 # nodelta
2728 numempty = 0
2735 numempty = 0
2729 numemptytext = 0
2736 numemptytext = 0
2730 numemptydelta = 0
2737 numemptydelta = 0
2731 # full file content
2738 # full file content
2732 numfull = 0
2739 numfull = 0
2733 # intermediate snapshot against a prior snapshot
2740 # intermediate snapshot against a prior snapshot
2734 numsemi = 0
2741 numsemi = 0
2735 # snapshot count per depth
2742 # snapshot count per depth
2736 numsnapdepth = collections.defaultdict(lambda: 0)
2743 numsnapdepth = collections.defaultdict(lambda: 0)
2737 # delta against previous revision
2744 # delta against previous revision
2738 numprev = 0
2745 numprev = 0
2739 # delta against first or second parent (not prev)
2746 # delta against first or second parent (not prev)
2740 nump1 = 0
2747 nump1 = 0
2741 nump2 = 0
2748 nump2 = 0
2742 # delta against neither prev nor parents
2749 # delta against neither prev nor parents
2743 numother = 0
2750 numother = 0
2744 # delta against prev that are also first or second parent
2751 # delta against prev that are also first or second parent
2745 # (details of `numprev`)
2752 # (details of `numprev`)
2746 nump1prev = 0
2753 nump1prev = 0
2747 nump2prev = 0
2754 nump2prev = 0
2748
2755
2749 # data about the delta chain of each rev
2756 # data about the delta chain of each rev
2750 chainlengths = []
2757 chainlengths = []
2751 chainbases = []
2758 chainbases = []
2752 chainspans = []
2759 chainspans = []
2753
2760
2754 # data about each revision
2761 # data about each revision
2755 datasize = [None, 0, 0]
2762 datasize = [None, 0, 0]
2756 fullsize = [None, 0, 0]
2763 fullsize = [None, 0, 0]
2757 semisize = [None, 0, 0]
2764 semisize = [None, 0, 0]
2758 # snapshot count per depth
2765 # snapshot count per depth
2759 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2766 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2760 deltasize = [None, 0, 0]
2767 deltasize = [None, 0, 0]
2761 chunktypecounts = {}
2768 chunktypecounts = {}
2762 chunktypesizes = {}
2769 chunktypesizes = {}
2763
2770
2764 def addsize(size, l):
2771 def addsize(size, l):
2765 if l[0] is None or size < l[0]:
2772 if l[0] is None or size < l[0]:
2766 l[0] = size
2773 l[0] = size
2767 if size > l[1]:
2774 if size > l[1]:
2768 l[1] = size
2775 l[1] = size
2769 l[2] += size
2776 l[2] += size
2770
2777
2771 numrevs = len(r)
2778 numrevs = len(r)
2772 for rev in pycompat.xrange(numrevs):
2779 for rev in pycompat.xrange(numrevs):
2773 p1, p2 = r.parentrevs(rev)
2780 p1, p2 = r.parentrevs(rev)
2774 delta = r.deltaparent(rev)
2781 delta = r.deltaparent(rev)
2775 if format > 0:
2782 if format > 0:
2776 addsize(r.rawsize(rev), datasize)
2783 addsize(r.rawsize(rev), datasize)
2777 if p2 != nullrev:
2784 if p2 != nullrev:
2778 nummerges += 1
2785 nummerges += 1
2779 size = r.length(rev)
2786 size = r.length(rev)
2780 if delta == nullrev:
2787 if delta == nullrev:
2781 chainlengths.append(0)
2788 chainlengths.append(0)
2782 chainbases.append(r.start(rev))
2789 chainbases.append(r.start(rev))
2783 chainspans.append(size)
2790 chainspans.append(size)
2784 if size == 0:
2791 if size == 0:
2785 numempty += 1
2792 numempty += 1
2786 numemptytext += 1
2793 numemptytext += 1
2787 else:
2794 else:
2788 numfull += 1
2795 numfull += 1
2789 numsnapdepth[0] += 1
2796 numsnapdepth[0] += 1
2790 addsize(size, fullsize)
2797 addsize(size, fullsize)
2791 addsize(size, snapsizedepth[0])
2798 addsize(size, snapsizedepth[0])
2792 else:
2799 else:
2793 chainlengths.append(chainlengths[delta] + 1)
2800 chainlengths.append(chainlengths[delta] + 1)
2794 baseaddr = chainbases[delta]
2801 baseaddr = chainbases[delta]
2795 revaddr = r.start(rev)
2802 revaddr = r.start(rev)
2796 chainbases.append(baseaddr)
2803 chainbases.append(baseaddr)
2797 chainspans.append((revaddr - baseaddr) + size)
2804 chainspans.append((revaddr - baseaddr) + size)
2798 if size == 0:
2805 if size == 0:
2799 numempty += 1
2806 numempty += 1
2800 numemptydelta += 1
2807 numemptydelta += 1
2801 elif r.issnapshot(rev):
2808 elif r.issnapshot(rev):
2802 addsize(size, semisize)
2809 addsize(size, semisize)
2803 numsemi += 1
2810 numsemi += 1
2804 depth = r.snapshotdepth(rev)
2811 depth = r.snapshotdepth(rev)
2805 numsnapdepth[depth] += 1
2812 numsnapdepth[depth] += 1
2806 addsize(size, snapsizedepth[depth])
2813 addsize(size, snapsizedepth[depth])
2807 else:
2814 else:
2808 addsize(size, deltasize)
2815 addsize(size, deltasize)
2809 if delta == rev - 1:
2816 if delta == rev - 1:
2810 numprev += 1
2817 numprev += 1
2811 if delta == p1:
2818 if delta == p1:
2812 nump1prev += 1
2819 nump1prev += 1
2813 elif delta == p2:
2820 elif delta == p2:
2814 nump2prev += 1
2821 nump2prev += 1
2815 elif delta == p1:
2822 elif delta == p1:
2816 nump1 += 1
2823 nump1 += 1
2817 elif delta == p2:
2824 elif delta == p2:
2818 nump2 += 1
2825 nump2 += 1
2819 elif delta != nullrev:
2826 elif delta != nullrev:
2820 numother += 1
2827 numother += 1
2821
2828
2822 # Obtain data on the raw chunks in the revlog.
2829 # Obtain data on the raw chunks in the revlog.
2823 if util.safehasattr(r, b'_getsegmentforrevs'):
2830 if util.safehasattr(r, b'_getsegmentforrevs'):
2824 segment = r._getsegmentforrevs(rev, rev)[1]
2831 segment = r._getsegmentforrevs(rev, rev)[1]
2825 else:
2832 else:
2826 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2833 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2827 if segment:
2834 if segment:
2828 chunktype = bytes(segment[0:1])
2835 chunktype = bytes(segment[0:1])
2829 else:
2836 else:
2830 chunktype = b'empty'
2837 chunktype = b'empty'
2831
2838
2832 if chunktype not in chunktypecounts:
2839 if chunktype not in chunktypecounts:
2833 chunktypecounts[chunktype] = 0
2840 chunktypecounts[chunktype] = 0
2834 chunktypesizes[chunktype] = 0
2841 chunktypesizes[chunktype] = 0
2835
2842
2836 chunktypecounts[chunktype] += 1
2843 chunktypecounts[chunktype] += 1
2837 chunktypesizes[chunktype] += size
2844 chunktypesizes[chunktype] += size
2838
2845
2839 # Adjust size min value for empty cases
2846 # Adjust size min value for empty cases
2840 for size in (datasize, fullsize, semisize, deltasize):
2847 for size in (datasize, fullsize, semisize, deltasize):
2841 if size[0] is None:
2848 if size[0] is None:
2842 size[0] = 0
2849 size[0] = 0
2843
2850
2844 numdeltas = numrevs - numfull - numempty - numsemi
2851 numdeltas = numrevs - numfull - numempty - numsemi
2845 numoprev = numprev - nump1prev - nump2prev
2852 numoprev = numprev - nump1prev - nump2prev
2846 totalrawsize = datasize[2]
2853 totalrawsize = datasize[2]
2847 datasize[2] /= numrevs
2854 datasize[2] /= numrevs
2848 fulltotal = fullsize[2]
2855 fulltotal = fullsize[2]
2849 if numfull == 0:
2856 if numfull == 0:
2850 fullsize[2] = 0
2857 fullsize[2] = 0
2851 else:
2858 else:
2852 fullsize[2] /= numfull
2859 fullsize[2] /= numfull
2853 semitotal = semisize[2]
2860 semitotal = semisize[2]
2854 snaptotal = {}
2861 snaptotal = {}
2855 if numsemi > 0:
2862 if numsemi > 0:
2856 semisize[2] /= numsemi
2863 semisize[2] /= numsemi
2857 for depth in snapsizedepth:
2864 for depth in snapsizedepth:
2858 snaptotal[depth] = snapsizedepth[depth][2]
2865 snaptotal[depth] = snapsizedepth[depth][2]
2859 snapsizedepth[depth][2] /= numsnapdepth[depth]
2866 snapsizedepth[depth][2] /= numsnapdepth[depth]
2860
2867
2861 deltatotal = deltasize[2]
2868 deltatotal = deltasize[2]
2862 if numdeltas > 0:
2869 if numdeltas > 0:
2863 deltasize[2] /= numdeltas
2870 deltasize[2] /= numdeltas
2864 totalsize = fulltotal + semitotal + deltatotal
2871 totalsize = fulltotal + semitotal + deltatotal
2865 avgchainlen = sum(chainlengths) / numrevs
2872 avgchainlen = sum(chainlengths) / numrevs
2866 maxchainlen = max(chainlengths)
2873 maxchainlen = max(chainlengths)
2867 maxchainspan = max(chainspans)
2874 maxchainspan = max(chainspans)
2868 compratio = 1
2875 compratio = 1
2869 if totalsize:
2876 if totalsize:
2870 compratio = totalrawsize / totalsize
2877 compratio = totalrawsize / totalsize
2871
2878
2872 basedfmtstr = b'%%%dd\n'
2879 basedfmtstr = b'%%%dd\n'
2873 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2880 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2874
2881
2875 def dfmtstr(max):
2882 def dfmtstr(max):
2876 return basedfmtstr % len(str(max))
2883 return basedfmtstr % len(str(max))
2877
2884
2878 def pcfmtstr(max, padding=0):
2885 def pcfmtstr(max, padding=0):
2879 return basepcfmtstr % (len(str(max)), b' ' * padding)
2886 return basepcfmtstr % (len(str(max)), b' ' * padding)
2880
2887
2881 def pcfmt(value, total):
2888 def pcfmt(value, total):
2882 if total:
2889 if total:
2883 return (value, 100 * float(value) / total)
2890 return (value, 100 * float(value) / total)
2884 else:
2891 else:
2885 return value, 100.0
2892 return value, 100.0
2886
2893
2887 ui.writenoi18n(b'format : %d\n' % format)
2894 ui.writenoi18n(b'format : %d\n' % format)
2888 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2895 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2889
2896
2890 ui.write(b'\n')
2897 ui.write(b'\n')
2891 fmt = pcfmtstr(totalsize)
2898 fmt = pcfmtstr(totalsize)
2892 fmt2 = dfmtstr(totalsize)
2899 fmt2 = dfmtstr(totalsize)
2893 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2900 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2894 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2901 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2895 ui.writenoi18n(
2902 ui.writenoi18n(
2896 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2903 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2897 )
2904 )
2898 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2905 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2899 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2906 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2900 ui.writenoi18n(
2907 ui.writenoi18n(
2901 b' text : '
2908 b' text : '
2902 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2909 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2903 )
2910 )
2904 ui.writenoi18n(
2911 ui.writenoi18n(
2905 b' delta : '
2912 b' delta : '
2906 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2913 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2907 )
2914 )
2908 ui.writenoi18n(
2915 ui.writenoi18n(
2909 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2916 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2910 )
2917 )
2911 for depth in sorted(numsnapdepth):
2918 for depth in sorted(numsnapdepth):
2912 ui.write(
2919 ui.write(
2913 (b' lvl-%-3d : ' % depth)
2920 (b' lvl-%-3d : ' % depth)
2914 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2921 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2915 )
2922 )
2916 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2923 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2917 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2924 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2918 ui.writenoi18n(
2925 ui.writenoi18n(
2919 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2926 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2920 )
2927 )
2921 for depth in sorted(numsnapdepth):
2928 for depth in sorted(numsnapdepth):
2922 ui.write(
2929 ui.write(
2923 (b' lvl-%-3d : ' % depth)
2930 (b' lvl-%-3d : ' % depth)
2924 + fmt % pcfmt(snaptotal[depth], totalsize)
2931 + fmt % pcfmt(snaptotal[depth], totalsize)
2925 )
2932 )
2926 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2933 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2927
2934
2928 def fmtchunktype(chunktype):
2935 def fmtchunktype(chunktype):
2929 if chunktype == b'empty':
2936 if chunktype == b'empty':
2930 return b' %s : ' % chunktype
2937 return b' %s : ' % chunktype
2931 elif chunktype in pycompat.bytestr(string.ascii_letters):
2938 elif chunktype in pycompat.bytestr(string.ascii_letters):
2932 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2939 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2933 else:
2940 else:
2934 return b' 0x%s : ' % hex(chunktype)
2941 return b' 0x%s : ' % hex(chunktype)
2935
2942
2936 ui.write(b'\n')
2943 ui.write(b'\n')
2937 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2944 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2938 for chunktype in sorted(chunktypecounts):
2945 for chunktype in sorted(chunktypecounts):
2939 ui.write(fmtchunktype(chunktype))
2946 ui.write(fmtchunktype(chunktype))
2940 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2947 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2941 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2948 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2942 for chunktype in sorted(chunktypecounts):
2949 for chunktype in sorted(chunktypecounts):
2943 ui.write(fmtchunktype(chunktype))
2950 ui.write(fmtchunktype(chunktype))
2944 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2951 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2945
2952
2946 ui.write(b'\n')
2953 ui.write(b'\n')
2947 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2954 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2948 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2955 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2949 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2956 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2950 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2957 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2951 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2958 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2952
2959
2953 if format > 0:
2960 if format > 0:
2954 ui.write(b'\n')
2961 ui.write(b'\n')
2955 ui.writenoi18n(
2962 ui.writenoi18n(
2956 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2963 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2957 % tuple(datasize)
2964 % tuple(datasize)
2958 )
2965 )
2959 ui.writenoi18n(
2966 ui.writenoi18n(
2960 b'full revision size (min/max/avg) : %d / %d / %d\n'
2967 b'full revision size (min/max/avg) : %d / %d / %d\n'
2961 % tuple(fullsize)
2968 % tuple(fullsize)
2962 )
2969 )
2963 ui.writenoi18n(
2970 ui.writenoi18n(
2964 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2971 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2965 % tuple(semisize)
2972 % tuple(semisize)
2966 )
2973 )
2967 for depth in sorted(snapsizedepth):
2974 for depth in sorted(snapsizedepth):
2968 if depth == 0:
2975 if depth == 0:
2969 continue
2976 continue
2970 ui.writenoi18n(
2977 ui.writenoi18n(
2971 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2978 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2972 % ((depth,) + tuple(snapsizedepth[depth]))
2979 % ((depth,) + tuple(snapsizedepth[depth]))
2973 )
2980 )
2974 ui.writenoi18n(
2981 ui.writenoi18n(
2975 b'delta size (min/max/avg) : %d / %d / %d\n'
2982 b'delta size (min/max/avg) : %d / %d / %d\n'
2976 % tuple(deltasize)
2983 % tuple(deltasize)
2977 )
2984 )
2978
2985
2979 if numdeltas > 0:
2986 if numdeltas > 0:
2980 ui.write(b'\n')
2987 ui.write(b'\n')
2981 fmt = pcfmtstr(numdeltas)
2988 fmt = pcfmtstr(numdeltas)
2982 fmt2 = pcfmtstr(numdeltas, 4)
2989 fmt2 = pcfmtstr(numdeltas, 4)
2983 ui.writenoi18n(
2990 ui.writenoi18n(
2984 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2991 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2985 )
2992 )
2986 if numprev > 0:
2993 if numprev > 0:
2987 ui.writenoi18n(
2994 ui.writenoi18n(
2988 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2995 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2989 )
2996 )
2990 ui.writenoi18n(
2997 ui.writenoi18n(
2991 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2998 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2992 )
2999 )
2993 ui.writenoi18n(
3000 ui.writenoi18n(
2994 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3001 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2995 )
3002 )
2996 if gdelta:
3003 if gdelta:
2997 ui.writenoi18n(
3004 ui.writenoi18n(
2998 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3005 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2999 )
3006 )
3000 ui.writenoi18n(
3007 ui.writenoi18n(
3001 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3008 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3002 )
3009 )
3003 ui.writenoi18n(
3010 ui.writenoi18n(
3004 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3011 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3005 )
3012 )
3006
3013
3007
3014
3008 @command(
3015 @command(
3009 b'debugrevlogindex',
3016 b'debugrevlogindex',
3010 cmdutil.debugrevlogopts
3017 cmdutil.debugrevlogopts
3011 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3018 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3012 _(b'[-f FORMAT] -c|-m|FILE'),
3019 _(b'[-f FORMAT] -c|-m|FILE'),
3013 optionalrepo=True,
3020 optionalrepo=True,
3014 )
3021 )
3015 def debugrevlogindex(ui, repo, file_=None, **opts):
3022 def debugrevlogindex(ui, repo, file_=None, **opts):
3016 """dump the contents of a revlog index"""
3023 """dump the contents of a revlog index"""
3017 opts = pycompat.byteskwargs(opts)
3024 opts = pycompat.byteskwargs(opts)
3018 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3025 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3019 format = opts.get(b'format', 0)
3026 format = opts.get(b'format', 0)
3020 if format not in (0, 1):
3027 if format not in (0, 1):
3021 raise error.Abort(_(b"unknown format %d") % format)
3028 raise error.Abort(_(b"unknown format %d") % format)
3022
3029
3023 if ui.debugflag:
3030 if ui.debugflag:
3024 shortfn = hex
3031 shortfn = hex
3025 else:
3032 else:
3026 shortfn = short
3033 shortfn = short
3027
3034
3028 # There might not be anything in r, so have a sane default
3035 # There might not be anything in r, so have a sane default
3029 idlen = 12
3036 idlen = 12
3030 for i in r:
3037 for i in r:
3031 idlen = len(shortfn(r.node(i)))
3038 idlen = len(shortfn(r.node(i)))
3032 break
3039 break
3033
3040
3034 if format == 0:
3041 if format == 0:
3035 if ui.verbose:
3042 if ui.verbose:
3036 ui.writenoi18n(
3043 ui.writenoi18n(
3037 b" rev offset length linkrev %s %s p2\n"
3044 b" rev offset length linkrev %s %s p2\n"
3038 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3045 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3039 )
3046 )
3040 else:
3047 else:
3041 ui.writenoi18n(
3048 ui.writenoi18n(
3042 b" rev linkrev %s %s p2\n"
3049 b" rev linkrev %s %s p2\n"
3043 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3050 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3044 )
3051 )
3045 elif format == 1:
3052 elif format == 1:
3046 if ui.verbose:
3053 if ui.verbose:
3047 ui.writenoi18n(
3054 ui.writenoi18n(
3048 (
3055 (
3049 b" rev flag offset length size link p1"
3056 b" rev flag offset length size link p1"
3050 b" p2 %s\n"
3057 b" p2 %s\n"
3051 )
3058 )
3052 % b"nodeid".rjust(idlen)
3059 % b"nodeid".rjust(idlen)
3053 )
3060 )
3054 else:
3061 else:
3055 ui.writenoi18n(
3062 ui.writenoi18n(
3056 b" rev flag size link p1 p2 %s\n"
3063 b" rev flag size link p1 p2 %s\n"
3057 % b"nodeid".rjust(idlen)
3064 % b"nodeid".rjust(idlen)
3058 )
3065 )
3059
3066
3060 for i in r:
3067 for i in r:
3061 node = r.node(i)
3068 node = r.node(i)
3062 if format == 0:
3069 if format == 0:
3063 try:
3070 try:
3064 pp = r.parents(node)
3071 pp = r.parents(node)
3065 except Exception:
3072 except Exception:
3066 pp = [nullid, nullid]
3073 pp = [nullid, nullid]
3067 if ui.verbose:
3074 if ui.verbose:
3068 ui.write(
3075 ui.write(
3069 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3076 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3070 % (
3077 % (
3071 i,
3078 i,
3072 r.start(i),
3079 r.start(i),
3073 r.length(i),
3080 r.length(i),
3074 r.linkrev(i),
3081 r.linkrev(i),
3075 shortfn(node),
3082 shortfn(node),
3076 shortfn(pp[0]),
3083 shortfn(pp[0]),
3077 shortfn(pp[1]),
3084 shortfn(pp[1]),
3078 )
3085 )
3079 )
3086 )
3080 else:
3087 else:
3081 ui.write(
3088 ui.write(
3082 b"% 6d % 7d %s %s %s\n"
3089 b"% 6d % 7d %s %s %s\n"
3083 % (
3090 % (
3084 i,
3091 i,
3085 r.linkrev(i),
3092 r.linkrev(i),
3086 shortfn(node),
3093 shortfn(node),
3087 shortfn(pp[0]),
3094 shortfn(pp[0]),
3088 shortfn(pp[1]),
3095 shortfn(pp[1]),
3089 )
3096 )
3090 )
3097 )
3091 elif format == 1:
3098 elif format == 1:
3092 pr = r.parentrevs(i)
3099 pr = r.parentrevs(i)
3093 if ui.verbose:
3100 if ui.verbose:
3094 ui.write(
3101 ui.write(
3095 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3102 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3096 % (
3103 % (
3097 i,
3104 i,
3098 r.flags(i),
3105 r.flags(i),
3099 r.start(i),
3106 r.start(i),
3100 r.length(i),
3107 r.length(i),
3101 r.rawsize(i),
3108 r.rawsize(i),
3102 r.linkrev(i),
3109 r.linkrev(i),
3103 pr[0],
3110 pr[0],
3104 pr[1],
3111 pr[1],
3105 shortfn(node),
3112 shortfn(node),
3106 )
3113 )
3107 )
3114 )
3108 else:
3115 else:
3109 ui.write(
3116 ui.write(
3110 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3117 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3111 % (
3118 % (
3112 i,
3119 i,
3113 r.flags(i),
3120 r.flags(i),
3114 r.rawsize(i),
3121 r.rawsize(i),
3115 r.linkrev(i),
3122 r.linkrev(i),
3116 pr[0],
3123 pr[0],
3117 pr[1],
3124 pr[1],
3118 shortfn(node),
3125 shortfn(node),
3119 )
3126 )
3120 )
3127 )
3121
3128
3122
3129
3123 @command(
3130 @command(
3124 b'debugrevspec',
3131 b'debugrevspec',
3125 [
3132 [
3126 (
3133 (
3127 b'',
3134 b'',
3128 b'optimize',
3135 b'optimize',
3129 None,
3136 None,
3130 _(b'print parsed tree after optimizing (DEPRECATED)'),
3137 _(b'print parsed tree after optimizing (DEPRECATED)'),
3131 ),
3138 ),
3132 (
3139 (
3133 b'',
3140 b'',
3134 b'show-revs',
3141 b'show-revs',
3135 True,
3142 True,
3136 _(b'print list of result revisions (default)'),
3143 _(b'print list of result revisions (default)'),
3137 ),
3144 ),
3138 (
3145 (
3139 b's',
3146 b's',
3140 b'show-set',
3147 b'show-set',
3141 None,
3148 None,
3142 _(b'print internal representation of result set'),
3149 _(b'print internal representation of result set'),
3143 ),
3150 ),
3144 (
3151 (
3145 b'p',
3152 b'p',
3146 b'show-stage',
3153 b'show-stage',
3147 [],
3154 [],
3148 _(b'print parsed tree at the given stage'),
3155 _(b'print parsed tree at the given stage'),
3149 _(b'NAME'),
3156 _(b'NAME'),
3150 ),
3157 ),
3151 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3158 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3152 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3159 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3153 ],
3160 ],
3154 b'REVSPEC',
3161 b'REVSPEC',
3155 )
3162 )
3156 def debugrevspec(ui, repo, expr, **opts):
3163 def debugrevspec(ui, repo, expr, **opts):
3157 """parse and apply a revision specification
3164 """parse and apply a revision specification
3158
3165
3159 Use the -p/--show-stage option to print the parsed tree at the given stages.
3166 Use the -p/--show-stage option to print the parsed tree at the given stages.
3160 Use -p all to print the tree at every stage.
3167 Use -p all to print the tree at every stage.
3161
3168
3162 Use the --no-show-revs option with -s or -p to print only the set
3169 Use the --no-show-revs option with -s or -p to print only the set
3163 representation or the parsed tree, respectively.
3170 representation or the parsed tree, respectively.
3164
3171
3165 Use --verify-optimized to compare the optimized result with the unoptimized
3172 Use --verify-optimized to compare the optimized result with the unoptimized
3166 one. Returns 1 if the optimized result differs.
3173 one. Returns 1 if the optimized result differs.
3167 """
3174 """
3168 opts = pycompat.byteskwargs(opts)
3175 opts = pycompat.byteskwargs(opts)
3169 aliases = ui.configitems(b'revsetalias')
3176 aliases = ui.configitems(b'revsetalias')
3170 stages = [
3177 stages = [
3171 (b'parsed', lambda tree: tree),
3178 (b'parsed', lambda tree: tree),
3172 (
3179 (
3173 b'expanded',
3180 b'expanded',
3174 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3181 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3175 ),
3182 ),
3176 (b'concatenated', revsetlang.foldconcat),
3183 (b'concatenated', revsetlang.foldconcat),
3177 (b'analyzed', revsetlang.analyze),
3184 (b'analyzed', revsetlang.analyze),
3178 (b'optimized', revsetlang.optimize),
3185 (b'optimized', revsetlang.optimize),
3179 ]
3186 ]
3180 if opts[b'no_optimized']:
3187 if opts[b'no_optimized']:
3181 stages = stages[:-1]
3188 stages = stages[:-1]
3182 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3189 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3183 raise error.Abort(
3190 raise error.Abort(
3184 _(b'cannot use --verify-optimized with --no-optimized')
3191 _(b'cannot use --verify-optimized with --no-optimized')
3185 )
3192 )
3186 stagenames = {n for n, f in stages}
3193 stagenames = {n for n, f in stages}
3187
3194
3188 showalways = set()
3195 showalways = set()
3189 showchanged = set()
3196 showchanged = set()
3190 if ui.verbose and not opts[b'show_stage']:
3197 if ui.verbose and not opts[b'show_stage']:
3191 # show parsed tree by --verbose (deprecated)
3198 # show parsed tree by --verbose (deprecated)
3192 showalways.add(b'parsed')
3199 showalways.add(b'parsed')
3193 showchanged.update([b'expanded', b'concatenated'])
3200 showchanged.update([b'expanded', b'concatenated'])
3194 if opts[b'optimize']:
3201 if opts[b'optimize']:
3195 showalways.add(b'optimized')
3202 showalways.add(b'optimized')
3196 if opts[b'show_stage'] and opts[b'optimize']:
3203 if opts[b'show_stage'] and opts[b'optimize']:
3197 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3204 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3198 if opts[b'show_stage'] == [b'all']:
3205 if opts[b'show_stage'] == [b'all']:
3199 showalways.update(stagenames)
3206 showalways.update(stagenames)
3200 else:
3207 else:
3201 for n in opts[b'show_stage']:
3208 for n in opts[b'show_stage']:
3202 if n not in stagenames:
3209 if n not in stagenames:
3203 raise error.Abort(_(b'invalid stage name: %s') % n)
3210 raise error.Abort(_(b'invalid stage name: %s') % n)
3204 showalways.update(opts[b'show_stage'])
3211 showalways.update(opts[b'show_stage'])
3205
3212
3206 treebystage = {}
3213 treebystage = {}
3207 printedtree = None
3214 printedtree = None
3208 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3215 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3209 for n, f in stages:
3216 for n, f in stages:
3210 treebystage[n] = tree = f(tree)
3217 treebystage[n] = tree = f(tree)
3211 if n in showalways or (n in showchanged and tree != printedtree):
3218 if n in showalways or (n in showchanged and tree != printedtree):
3212 if opts[b'show_stage'] or n != b'parsed':
3219 if opts[b'show_stage'] or n != b'parsed':
3213 ui.write(b"* %s:\n" % n)
3220 ui.write(b"* %s:\n" % n)
3214 ui.write(revsetlang.prettyformat(tree), b"\n")
3221 ui.write(revsetlang.prettyformat(tree), b"\n")
3215 printedtree = tree
3222 printedtree = tree
3216
3223
3217 if opts[b'verify_optimized']:
3224 if opts[b'verify_optimized']:
3218 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3225 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3219 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3226 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3220 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3227 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3221 ui.writenoi18n(
3228 ui.writenoi18n(
3222 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3229 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3223 )
3230 )
3224 ui.writenoi18n(
3231 ui.writenoi18n(
3225 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3232 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3226 )
3233 )
3227 arevs = list(arevs)
3234 arevs = list(arevs)
3228 brevs = list(brevs)
3235 brevs = list(brevs)
3229 if arevs == brevs:
3236 if arevs == brevs:
3230 return 0
3237 return 0
3231 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3238 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3232 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3239 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3233 sm = difflib.SequenceMatcher(None, arevs, brevs)
3240 sm = difflib.SequenceMatcher(None, arevs, brevs)
3234 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3241 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3235 if tag in ('delete', 'replace'):
3242 if tag in ('delete', 'replace'):
3236 for c in arevs[alo:ahi]:
3243 for c in arevs[alo:ahi]:
3237 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3244 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3238 if tag in ('insert', 'replace'):
3245 if tag in ('insert', 'replace'):
3239 for c in brevs[blo:bhi]:
3246 for c in brevs[blo:bhi]:
3240 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3247 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3241 if tag == 'equal':
3248 if tag == 'equal':
3242 for c in arevs[alo:ahi]:
3249 for c in arevs[alo:ahi]:
3243 ui.write(b' %d\n' % c)
3250 ui.write(b' %d\n' % c)
3244 return 1
3251 return 1
3245
3252
3246 func = revset.makematcher(tree)
3253 func = revset.makematcher(tree)
3247 revs = func(repo)
3254 revs = func(repo)
3248 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3255 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3249 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3256 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3250 if not opts[b'show_revs']:
3257 if not opts[b'show_revs']:
3251 return
3258 return
3252 for c in revs:
3259 for c in revs:
3253 ui.write(b"%d\n" % c)
3260 ui.write(b"%d\n" % c)
3254
3261
3255
3262
3256 @command(
3263 @command(
3257 b'debugserve',
3264 b'debugserve',
3258 [
3265 [
3259 (
3266 (
3260 b'',
3267 b'',
3261 b'sshstdio',
3268 b'sshstdio',
3262 False,
3269 False,
3263 _(b'run an SSH server bound to process handles'),
3270 _(b'run an SSH server bound to process handles'),
3264 ),
3271 ),
3265 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3272 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3266 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3273 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3267 ],
3274 ],
3268 b'',
3275 b'',
3269 )
3276 )
3270 def debugserve(ui, repo, **opts):
3277 def debugserve(ui, repo, **opts):
3271 """run a server with advanced settings
3278 """run a server with advanced settings
3272
3279
3273 This command is similar to :hg:`serve`. It exists partially as a
3280 This command is similar to :hg:`serve`. It exists partially as a
3274 workaround to the fact that ``hg serve --stdio`` must have specific
3281 workaround to the fact that ``hg serve --stdio`` must have specific
3275 arguments for security reasons.
3282 arguments for security reasons.
3276 """
3283 """
3277 opts = pycompat.byteskwargs(opts)
3284 opts = pycompat.byteskwargs(opts)
3278
3285
3279 if not opts[b'sshstdio']:
3286 if not opts[b'sshstdio']:
3280 raise error.Abort(_(b'only --sshstdio is currently supported'))
3287 raise error.Abort(_(b'only --sshstdio is currently supported'))
3281
3288
3282 logfh = None
3289 logfh = None
3283
3290
3284 if opts[b'logiofd'] and opts[b'logiofile']:
3291 if opts[b'logiofd'] and opts[b'logiofile']:
3285 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3292 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3286
3293
3287 if opts[b'logiofd']:
3294 if opts[b'logiofd']:
3288 # Ideally we would be line buffered. But line buffering in binary
3295 # Ideally we would be line buffered. But line buffering in binary
3289 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3296 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3290 # buffering could have performance impacts. But since this isn't
3297 # buffering could have performance impacts. But since this isn't
3291 # performance critical code, it should be fine.
3298 # performance critical code, it should be fine.
3292 try:
3299 try:
3293 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3300 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3294 except OSError as e:
3301 except OSError as e:
3295 if e.errno != errno.ESPIPE:
3302 if e.errno != errno.ESPIPE:
3296 raise
3303 raise
3297 # can't seek a pipe, so `ab` mode fails on py3
3304 # can't seek a pipe, so `ab` mode fails on py3
3298 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3305 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3299 elif opts[b'logiofile']:
3306 elif opts[b'logiofile']:
3300 logfh = open(opts[b'logiofile'], b'ab', 0)
3307 logfh = open(opts[b'logiofile'], b'ab', 0)
3301
3308
3302 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3309 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3303 s.serve_forever()
3310 s.serve_forever()
3304
3311
3305
3312
3306 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3313 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3307 def debugsetparents(ui, repo, rev1, rev2=None):
3314 def debugsetparents(ui, repo, rev1, rev2=None):
3308 """manually set the parents of the current working directory
3315 """manually set the parents of the current working directory
3309
3316
3310 This is useful for writing repository conversion tools, but should
3317 This is useful for writing repository conversion tools, but should
3311 be used with care. For example, neither the working directory nor the
3318 be used with care. For example, neither the working directory nor the
3312 dirstate is updated, so file status may be incorrect after running this
3319 dirstate is updated, so file status may be incorrect after running this
3313 command.
3320 command.
3314
3321
3315 Returns 0 on success.
3322 Returns 0 on success.
3316 """
3323 """
3317
3324
3318 node1 = scmutil.revsingle(repo, rev1).node()
3325 node1 = scmutil.revsingle(repo, rev1).node()
3319 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3326 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3320
3327
3321 with repo.wlock():
3328 with repo.wlock():
3322 repo.setparents(node1, node2)
3329 repo.setparents(node1, node2)
3323
3330
3324
3331
3325 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3332 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3326 def debugsidedata(ui, repo, file_, rev=None, **opts):
3333 def debugsidedata(ui, repo, file_, rev=None, **opts):
3327 """dump the side data for a cl/manifest/file revision
3334 """dump the side data for a cl/manifest/file revision
3328
3335
3329 Use --verbose to dump the sidedata content."""
3336 Use --verbose to dump the sidedata content."""
3330 opts = pycompat.byteskwargs(opts)
3337 opts = pycompat.byteskwargs(opts)
3331 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3338 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3332 if rev is not None:
3339 if rev is not None:
3333 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3340 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3334 file_, rev = None, file_
3341 file_, rev = None, file_
3335 elif rev is None:
3342 elif rev is None:
3336 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3343 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3337 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3344 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3338 r = getattr(r, '_revlog', r)
3345 r = getattr(r, '_revlog', r)
3339 try:
3346 try:
3340 sidedata = r.sidedata(r.lookup(rev))
3347 sidedata = r.sidedata(r.lookup(rev))
3341 except KeyError:
3348 except KeyError:
3342 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3349 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3343 if sidedata:
3350 if sidedata:
3344 sidedata = list(sidedata.items())
3351 sidedata = list(sidedata.items())
3345 sidedata.sort()
3352 sidedata.sort()
3346 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3353 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3347 for key, value in sidedata:
3354 for key, value in sidedata:
3348 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3355 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3349 if ui.verbose:
3356 if ui.verbose:
3350 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3357 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3351
3358
3352
3359
3353 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3360 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3354 def debugssl(ui, repo, source=None, **opts):
3361 def debugssl(ui, repo, source=None, **opts):
3355 '''test a secure connection to a server
3362 '''test a secure connection to a server
3356
3363
3357 This builds the certificate chain for the server on Windows, installing the
3364 This builds the certificate chain for the server on Windows, installing the
3358 missing intermediates and trusted root via Windows Update if necessary. It
3365 missing intermediates and trusted root via Windows Update if necessary. It
3359 does nothing on other platforms.
3366 does nothing on other platforms.
3360
3367
3361 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3368 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3362 that server is used. See :hg:`help urls` for more information.
3369 that server is used. See :hg:`help urls` for more information.
3363
3370
3364 If the update succeeds, retry the original operation. Otherwise, the cause
3371 If the update succeeds, retry the original operation. Otherwise, the cause
3365 of the SSL error is likely another issue.
3372 of the SSL error is likely another issue.
3366 '''
3373 '''
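# Editor's illustrative usage (not part of the upstream source); only useful on
# Windows, as the docstring above explains. The URL is a placeholder:
#   hg debugssl https://hg.example.org/repo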
3367 if not pycompat.iswindows:
3374 if not pycompat.iswindows:
3368 raise error.Abort(
3375 raise error.Abort(
3369 _(b'certificate chain building is only possible on Windows')
3376 _(b'certificate chain building is only possible on Windows')
3370 )
3377 )
3371
3378
3372 if not source:
3379 if not source:
3373 if not repo:
3380 if not repo:
3374 raise error.Abort(
3381 raise error.Abort(
3375 _(
3382 _(
3376 b"there is no Mercurial repository here, and no "
3383 b"there is no Mercurial repository here, and no "
3377 b"server specified"
3384 b"server specified"
3378 )
3385 )
3379 )
3386 )
3380 source = b"default"
3387 source = b"default"
3381
3388
3382 source, branches = hg.parseurl(ui.expandpath(source))
3389 source, branches = hg.parseurl(ui.expandpath(source))
3383 url = util.url(source)
3390 url = util.url(source)
3384
3391
3385 defaultport = {b'https': 443, b'ssh': 22}
3392 defaultport = {b'https': 443, b'ssh': 22}
3386 if url.scheme in defaultport:
3393 if url.scheme in defaultport:
3387 try:
3394 try:
3388 addr = (url.host, int(url.port or defaultport[url.scheme]))
3395 addr = (url.host, int(url.port or defaultport[url.scheme]))
3389 except ValueError:
3396 except ValueError:
3390 raise error.Abort(_(b"malformed port number in URL"))
3397 raise error.Abort(_(b"malformed port number in URL"))
3391 else:
3398 else:
3392 raise error.Abort(_(b"only https and ssh connections are supported"))
3399 raise error.Abort(_(b"only https and ssh connections are supported"))
3393
3400
3394 from . import win32
3401 from . import win32
3395
3402
3396 s = ssl.wrap_socket(
3403 s = ssl.wrap_socket(
3397 socket.socket(),
3404 socket.socket(),
3398 ssl_version=ssl.PROTOCOL_TLS,
3405 ssl_version=ssl.PROTOCOL_TLS,
3399 cert_reqs=ssl.CERT_NONE,
3406 cert_reqs=ssl.CERT_NONE,
3400 ca_certs=None,
3407 ca_certs=None,
3401 )
3408 )
3402
3409
3403 try:
3410 try:
3404 s.connect(addr)
3411 s.connect(addr)
3405 cert = s.getpeercert(True)
3412 cert = s.getpeercert(True)
3406
3413
3407 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3414 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3408
3415
3409 complete = win32.checkcertificatechain(cert, build=False)
3416 complete = win32.checkcertificatechain(cert, build=False)
3410
3417
3411 if not complete:
3418 if not complete:
3412 ui.status(_(b'certificate chain is incomplete, updating... '))
3419 ui.status(_(b'certificate chain is incomplete, updating... '))
3413
3420
3414 if not win32.checkcertificatechain(cert):
3421 if not win32.checkcertificatechain(cert):
3415 ui.status(_(b'failed.\n'))
3422 ui.status(_(b'failed.\n'))
3416 else:
3423 else:
3417 ui.status(_(b'done.\n'))
3424 ui.status(_(b'done.\n'))
3418 else:
3425 else:
3419 ui.status(_(b'full certificate chain is available\n'))
3426 ui.status(_(b'full certificate chain is available\n'))
3420 finally:
3427 finally:
3421 s.close()
3428 s.close()
3422
3429
3423
3430
3424 @command(
3431 @command(
3425 b"debugbackupbundle",
3432 b"debugbackupbundle",
3426 [
3433 [
3427 (
3434 (
3428 b"",
3435 b"",
3429 b"recover",
3436 b"recover",
3430 b"",
3437 b"",
3431 b"brings the specified changeset back into the repository",
3438 b"brings the specified changeset back into the repository",
3432 )
3439 )
3433 ]
3440 ]
3434 + cmdutil.logopts,
3441 + cmdutil.logopts,
3435 _(b"hg debugbackupbundle [--recover HASH]"),
3442 _(b"hg debugbackupbundle [--recover HASH]"),
3436 )
3443 )
3437 def debugbackupbundle(ui, repo, *pats, **opts):
3444 def debugbackupbundle(ui, repo, *pats, **opts):
3438 """lists the changesets available in backup bundles
3445 """lists the changesets available in backup bundles
3439
3446
3440 Without any arguments, this command prints a list of the changesets in each
3447 Without any arguments, this command prints a list of the changesets in each
3441 backup bundle.
3448 backup bundle.
3442
3449
3443 --recover takes a changeset hash and unbundles the first bundle that
3450 --recover takes a changeset hash and unbundles the first bundle that
3444 contains that hash, which puts that changeset back in your repository.
3451 contains that hash, which puts that changeset back in your repository.
3445
3452
3446 --verbose will print the entire commit message and the bundle path for that
3453 --verbose will print the entire commit message and the bundle path for that
3447 backup.
3454 backup.
3448 """
3455 """
3449 backups = list(
3456 backups = list(
3450 filter(
3457 filter(
3451 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3458 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3452 )
3459 )
3453 )
3460 )
3454 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3461 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3455
3462
3456 opts = pycompat.byteskwargs(opts)
3463 opts = pycompat.byteskwargs(opts)
3457 opts[b"bundle"] = b""
3464 opts[b"bundle"] = b""
3458 opts[b"force"] = None
3465 opts[b"force"] = None
3459 limit = logcmdutil.getlimit(opts)
3466 limit = logcmdutil.getlimit(opts)
3460
3467
3461 def display(other, chlist, displayer):
3468 def display(other, chlist, displayer):
3462 if opts.get(b"newest_first"):
3469 if opts.get(b"newest_first"):
3463 chlist.reverse()
3470 chlist.reverse()
3464 count = 0
3471 count = 0
3465 for n in chlist:
3472 for n in chlist:
3466 if limit is not None and count >= limit:
3473 if limit is not None and count >= limit:
3467 break
3474 break
3468 parents = [True for p in other.changelog.parents(n) if p != nullid]
3475 parents = [True for p in other.changelog.parents(n) if p != nullid]
3469 if opts.get(b"no_merges") and len(parents) == 2:
3476 if opts.get(b"no_merges") and len(parents) == 2:
3470 continue
3477 continue
3471 count += 1
3478 count += 1
3472 displayer.show(other[n])
3479 displayer.show(other[n])
3473
3480
3474 recovernode = opts.get(b"recover")
3481 recovernode = opts.get(b"recover")
3475 if recovernode:
3482 if recovernode:
3476 if scmutil.isrevsymbol(repo, recovernode):
3483 if scmutil.isrevsymbol(repo, recovernode):
3477 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3484 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3478 return
3485 return
3479 elif backups:
3486 elif backups:
3480 msg = _(
3487 msg = _(
3481 b"Recover changesets using: hg debugbackupbundle --recover "
3488 b"Recover changesets using: hg debugbackupbundle --recover "
3482 b"<changeset hash>\n\nAvailable backup changesets:"
3489 b"<changeset hash>\n\nAvailable backup changesets:"
3483 )
3490 )
3484 ui.status(msg, label=b"status.removed")
3491 ui.status(msg, label=b"status.removed")
3485 else:
3492 else:
3486 ui.status(_(b"no backup changesets found\n"))
3493 ui.status(_(b"no backup changesets found\n"))
3487 return
3494 return
3488
3495
3489 for backup in backups:
3496 for backup in backups:
3490 # Much of this is copied from the hg incoming logic
3497 # Much of this is copied from the hg incoming logic
3491 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3498 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3492 source, branches = hg.parseurl(source, opts.get(b"branch"))
3499 source, branches = hg.parseurl(source, opts.get(b"branch"))
3493 try:
3500 try:
3494 other = hg.peer(repo, opts, source)
3501 other = hg.peer(repo, opts, source)
3495 except error.LookupError as ex:
3502 except error.LookupError as ex:
3496 msg = _(b"\nwarning: unable to open bundle %s") % source
3503 msg = _(b"\nwarning: unable to open bundle %s") % source
3497 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3504 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3498 ui.warn(msg, hint=hint)
3505 ui.warn(msg, hint=hint)
3499 continue
3506 continue
3500 revs, checkout = hg.addbranchrevs(
3507 revs, checkout = hg.addbranchrevs(
3501 repo, other, branches, opts.get(b"rev")
3508 repo, other, branches, opts.get(b"rev")
3502 )
3509 )
3503
3510
3504 if revs:
3511 if revs:
3505 revs = [other.lookup(rev) for rev in revs]
3512 revs = [other.lookup(rev) for rev in revs]
3506
3513
3507 quiet = ui.quiet
3514 quiet = ui.quiet
3508 try:
3515 try:
3509 ui.quiet = True
3516 ui.quiet = True
3510 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3517 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3511 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3518 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3512 )
3519 )
3513 except error.LookupError:
3520 except error.LookupError:
3514 continue
3521 continue
3515 finally:
3522 finally:
3516 ui.quiet = quiet
3523 ui.quiet = quiet
3517
3524
3518 try:
3525 try:
3519 if not chlist:
3526 if not chlist:
3520 continue
3527 continue
3521 if recovernode:
3528 if recovernode:
3522 with repo.lock(), repo.transaction(b"unbundle") as tr:
3529 with repo.lock(), repo.transaction(b"unbundle") as tr:
3523 if scmutil.isrevsymbol(other, recovernode):
3530 if scmutil.isrevsymbol(other, recovernode):
3524 ui.status(_(b"Unbundling %s\n") % (recovernode))
3531 ui.status(_(b"Unbundling %s\n") % (recovernode))
3525 f = hg.openpath(ui, source)
3532 f = hg.openpath(ui, source)
3526 gen = exchange.readbundle(ui, f, source)
3533 gen = exchange.readbundle(ui, f, source)
3527 if isinstance(gen, bundle2.unbundle20):
3534 if isinstance(gen, bundle2.unbundle20):
3528 bundle2.applybundle(
3535 bundle2.applybundle(
3529 repo,
3536 repo,
3530 gen,
3537 gen,
3531 tr,
3538 tr,
3532 source=b"unbundle",
3539 source=b"unbundle",
3533 url=b"bundle:" + source,
3540 url=b"bundle:" + source,
3534 )
3541 )
3535 else:
3542 else:
3536 gen.apply(repo, b"unbundle", b"bundle:" + source)
3543 gen.apply(repo, b"unbundle", b"bundle:" + source)
3537 break
3544 break
3538 else:
3545 else:
3539 backupdate = encoding.strtolocal(
3546 backupdate = encoding.strtolocal(
3540 time.strftime(
3547 time.strftime(
3541 "%a %H:%M, %Y-%m-%d",
3548 "%a %H:%M, %Y-%m-%d",
3542 time.localtime(os.path.getmtime(source)),
3549 time.localtime(os.path.getmtime(source)),
3543 )
3550 )
3544 )
3551 )
3545 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3552 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3546 if ui.verbose:
3553 if ui.verbose:
3547 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3554 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3548 else:
3555 else:
3549 opts[
3556 opts[
3550 b"template"
3557 b"template"
3551 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3558 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3552 displayer = logcmdutil.changesetdisplayer(
3559 displayer = logcmdutil.changesetdisplayer(
3553 ui, other, opts, False
3560 ui, other, opts, False
3554 )
3561 )
3555 display(other, chlist, displayer)
3562 display(other, chlist, displayer)
3556 displayer.close()
3563 displayer.close()
3557 finally:
3564 finally:
3558 cleanupfn()
3565 cleanupfn()
3559
3566
3560
3567
3561 @command(
3568 @command(
3562 b'debugsub',
3569 b'debugsub',
3563 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3570 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3564 _(b'[-r REV] [REV]'),
3571 _(b'[-r REV] [REV]'),
3565 )
3572 )
3566 def debugsub(ui, repo, rev=None):
3573 def debugsub(ui, repo, rev=None):
3567 ctx = scmutil.revsingle(repo, rev, None)
3574 ctx = scmutil.revsingle(repo, rev, None)
3568 for k, v in sorted(ctx.substate.items()):
3575 for k, v in sorted(ctx.substate.items()):
3569 ui.writenoi18n(b'path %s\n' % k)
3576 ui.writenoi18n(b'path %s\n' % k)
3570 ui.writenoi18n(b' source %s\n' % v[0])
3577 ui.writenoi18n(b' source %s\n' % v[0])
3571 ui.writenoi18n(b' revision %s\n' % v[1])
3578 ui.writenoi18n(b' revision %s\n' % v[1])
3572
3579
3573
3580
3574 @command(
3581 @command(
3575 b'debugsuccessorssets',
3582 b'debugsuccessorssets',
3576 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3583 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3577 _(b'[REV]'),
3584 _(b'[REV]'),
3578 )
3585 )
3579 def debugsuccessorssets(ui, repo, *revs, **opts):
3586 def debugsuccessorssets(ui, repo, *revs, **opts):
3580 """show set of successors for revision
3587 """show set of successors for revision
3581
3588
3582 A successors set of changeset A is a consistent group of revisions that
3589 A successors set of changeset A is a consistent group of revisions that
3583 succeed A. It contains only non-obsolete changesets unless the --closest
3590 succeed A. It contains only non-obsolete changesets unless the --closest
3584 option is set.
3591 option is set.
3585
3592
3586 In most cases a changeset A has a single successors set containing a single
3593 In most cases a changeset A has a single successors set containing a single
3587 successor (changeset A replaced by A').
3594 successor (changeset A replaced by A').
3588
3595
3589 A changeset that is made obsolete with no successors is called "pruned".
3596 A changeset that is made obsolete with no successors is called "pruned".
3590 Such changesets have no successors sets at all.
3597 Such changesets have no successors sets at all.
3591
3598
3592 A changeset that has been "split" will have a successors set containing
3599 A changeset that has been "split" will have a successors set containing
3593 more than one successor.
3600 more than one successor.
3594
3601
3595 A changeset that has been rewritten in multiple different ways is called
3602 A changeset that has been rewritten in multiple different ways is called
3596 "divergent". Such changesets have multiple successor sets (each of which
3603 "divergent". Such changesets have multiple successor sets (each of which
3597 may also be split, i.e. have multiple successors).
3604 may also be split, i.e. have multiple successors).
3598
3605
3599 Results are displayed as follows::
3606 Results are displayed as follows::
3600
3607
3601 <rev1>
3608 <rev1>
3602 <successors-1A>
3609 <successors-1A>
3603 <rev2>
3610 <rev2>
3604 <successors-2A>
3611 <successors-2A>
3605 <successors-2B1> <successors-2B2> <successors-2B3>
3612 <successors-2B1> <successors-2B2> <successors-2B3>
3606
3613
3607 Here rev2 has two possible (i.e. divergent) successors sets. The first
3614 Here rev2 has two possible (i.e. divergent) successors sets. The first
3608 holds one element, whereas the second holds three (i.e. the changeset has
3615 holds one element, whereas the second holds three (i.e. the changeset has
3609 been split).
3616 been split).
3610 """
3617 """
3611 # passed to successorssets caching computation from one call to another
3618 # passed to successorssets caching computation from one call to another
3612 cache = {}
3619 cache = {}
3613 ctx2str = bytes
3620 ctx2str = bytes
3614 node2str = short
3621 node2str = short
3615 for rev in scmutil.revrange(repo, revs):
3622 for rev in scmutil.revrange(repo, revs):
3616 ctx = repo[rev]
3623 ctx = repo[rev]
3617 ui.write(b'%s\n' % ctx2str(ctx))
3624 ui.write(b'%s\n' % ctx2str(ctx))
3618 for succsset in obsutil.successorssets(
3625 for succsset in obsutil.successorssets(
3619 repo, ctx.node(), closest=opts['closest'], cache=cache
3626 repo, ctx.node(), closest=opts['closest'], cache=cache
3620 ):
3627 ):
3621 if succsset:
3628 if succsset:
3622 ui.write(b' ')
3629 ui.write(b' ')
3623 ui.write(node2str(succsset[0]))
3630 ui.write(node2str(succsset[0]))
3624 for node in succsset[1:]:
3631 for node in succsset[1:]:
3625 ui.write(b' ')
3632 ui.write(b' ')
3626 ui.write(node2str(node))
3633 ui.write(node2str(node))
3627 ui.write(b'\n')
3634 ui.write(b'\n')
3628
3635
3629
3636
3630 @command(b'debugtagscache', [])
3637 @command(b'debugtagscache', [])
3631 def debugtagscache(ui, repo):
3638 def debugtagscache(ui, repo):
3632 """display the contents of .hg/cache/hgtagsfnodes1"""
3639 """display the contents of .hg/cache/hgtagsfnodes1"""
3633 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3640 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3634 for r in repo:
3641 for r in repo:
3635 node = repo[r].node()
3642 node = repo[r].node()
3636 tagsnode = cache.getfnode(node, computemissing=False)
3643 tagsnode = cache.getfnode(node, computemissing=False)
3637 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3644 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3638 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3645 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3639
3646
3640
3647
3641 @command(
3648 @command(
3642 b'debugtemplate',
3649 b'debugtemplate',
3643 [
3650 [
3644 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3651 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3645 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3652 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3646 ],
3653 ],
3647 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3654 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3648 optionalrepo=True,
3655 optionalrepo=True,
3649 )
3656 )
3650 def debugtemplate(ui, repo, tmpl, **opts):
3657 def debugtemplate(ui, repo, tmpl, **opts):
3651 """parse and apply a template
3658 """parse and apply a template
3652
3659
3653 If -r/--rev is given, the template is processed as a log template and
3660 If -r/--rev is given, the template is processed as a log template and
3654 applied to the given changesets. Otherwise, it is processed as a generic
3661 applied to the given changesets. Otherwise, it is processed as a generic
3655 template.
3662 template.
3656
3663
3657 Use --verbose to print the parsed tree.
3664 Use --verbose to print the parsed tree.
3658 """
3665 """
3659 revs = None
3666 revs = None
3660 if opts['rev']:
3667 if opts['rev']:
3661 if repo is None:
3668 if repo is None:
3662 raise error.RepoError(
3669 raise error.RepoError(
3663 _(b'there is no Mercurial repository here (.hg not found)')
3670 _(b'there is no Mercurial repository here (.hg not found)')
3664 )
3671 )
3665 revs = scmutil.revrange(repo, opts['rev'])
3672 revs = scmutil.revrange(repo, opts['rev'])
3666
3673
3667 props = {}
3674 props = {}
3668 for d in opts['define']:
3675 for d in opts['define']:
3669 try:
3676 try:
3670 k, v = (e.strip() for e in d.split(b'=', 1))
3677 k, v = (e.strip() for e in d.split(b'=', 1))
3671 if not k or k == b'ui':
3678 if not k or k == b'ui':
3672 raise ValueError
3679 raise ValueError
3673 props[k] = v
3680 props[k] = v
3674 except ValueError:
3681 except ValueError:
3675 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3682 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3676
3683
3677 if ui.verbose:
3684 if ui.verbose:
3678 aliases = ui.configitems(b'templatealias')
3685 aliases = ui.configitems(b'templatealias')
3679 tree = templater.parse(tmpl)
3686 tree = templater.parse(tmpl)
3680 ui.note(templater.prettyformat(tree), b'\n')
3687 ui.note(templater.prettyformat(tree), b'\n')
3681 newtree = templater.expandaliases(tree, aliases)
3688 newtree = templater.expandaliases(tree, aliases)
3682 if newtree != tree:
3689 if newtree != tree:
3683 ui.notenoi18n(
3690 ui.notenoi18n(
3684 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3691 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3685 )
3692 )
3686
3693
3687 if revs is None:
3694 if revs is None:
3688 tres = formatter.templateresources(ui, repo)
3695 tres = formatter.templateresources(ui, repo)
3689 t = formatter.maketemplater(ui, tmpl, resources=tres)
3696 t = formatter.maketemplater(ui, tmpl, resources=tres)
3690 if ui.verbose:
3697 if ui.verbose:
3691 kwds, funcs = t.symbolsuseddefault()
3698 kwds, funcs = t.symbolsuseddefault()
3692 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3699 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3693 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3700 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3694 ui.write(t.renderdefault(props))
3701 ui.write(t.renderdefault(props))
3695 else:
3702 else:
3696 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3703 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3697 if ui.verbose:
3704 if ui.verbose:
3698 kwds, funcs = displayer.t.symbolsuseddefault()
3705 kwds, funcs = displayer.t.symbolsuseddefault()
3699 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3706 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3700 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3707 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3701 for r in revs:
3708 for r in revs:
3702 displayer.show(repo[r], **pycompat.strkwargs(props))
3709 displayer.show(repo[r], **pycompat.strkwargs(props))
3703 displayer.close()
3710 displayer.close()
3704
3711
3705
3712
3706 @command(
3713 @command(
3707 b'debuguigetpass',
3714 b'debuguigetpass',
3708 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3715 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3709 _(b'[-p TEXT]'),
3716 _(b'[-p TEXT]'),
3710 norepo=True,
3717 norepo=True,
3711 )
3718 )
3712 def debuguigetpass(ui, prompt=b''):
3719 def debuguigetpass(ui, prompt=b''):
3713 """show prompt to type password"""
3720 """show prompt to type password"""
3714 r = ui.getpass(prompt)
3721 r = ui.getpass(prompt)
3715 ui.writenoi18n(b'response: %s\n' % r)
3722 ui.writenoi18n(b'response: %s\n' % r)
3716
3723
3717
3724
3718 @command(
3725 @command(
3719 b'debuguiprompt',
3726 b'debuguiprompt',
3720 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3727 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3721 _(b'[-p TEXT]'),
3728 _(b'[-p TEXT]'),
3722 norepo=True,
3729 norepo=True,
3723 )
3730 )
3724 def debuguiprompt(ui, prompt=b''):
3731 def debuguiprompt(ui, prompt=b''):
3725 """show plain prompt"""
3732 """show plain prompt"""
3726 r = ui.prompt(prompt)
3733 r = ui.prompt(prompt)
3727 ui.writenoi18n(b'response: %s\n' % r)
3734 ui.writenoi18n(b'response: %s\n' % r)
3728
3735
3729
3736
3730 @command(b'debugupdatecaches', [])
3737 @command(b'debugupdatecaches', [])
3731 def debugupdatecaches(ui, repo, *pats, **opts):
3738 def debugupdatecaches(ui, repo, *pats, **opts):
3732 """warm all known caches in the repository"""
3739 """warm all known caches in the repository"""
3733 with repo.wlock(), repo.lock():
3740 with repo.wlock(), repo.lock():
3734 repo.updatecaches(full=True)
3741 repo.updatecaches(full=True)
3735
3742
3736
3743
3737 @command(
3744 @command(
3738 b'debugupgraderepo',
3745 b'debugupgraderepo',
3739 [
3746 [
3740 (
3747 (
3741 b'o',
3748 b'o',
3742 b'optimize',
3749 b'optimize',
3743 [],
3750 [],
3744 _(b'extra optimization to perform'),
3751 _(b'extra optimization to perform'),
3745 _(b'NAME'),
3752 _(b'NAME'),
3746 ),
3753 ),
3747 (b'', b'run', False, _(b'performs an upgrade')),
3754 (b'', b'run', False, _(b'performs an upgrade')),
3748 (b'', b'backup', True, _(b'keep the old repository content around')),
3755 (b'', b'backup', True, _(b'keep the old repository content around')),
3749 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3756 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3750 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3757 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3751 ],
3758 ],
3752 )
3759 )
3753 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3760 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3754 """upgrade a repository to use different features
3761 """upgrade a repository to use different features
3755
3762
3756 If no arguments are specified, the repository is evaluated for upgrade
3763 If no arguments are specified, the repository is evaluated for upgrade
3757 and a list of problems and potential optimizations is printed.
3764 and a list of problems and potential optimizations is printed.
3758
3765
3759 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3766 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3760 can be influenced via additional arguments. More details will be provided
3767 can be influenced via additional arguments. More details will be provided
3761 by the command output when run without ``--run``.
3768 by the command output when run without ``--run``.
3762
3769
3763 During the upgrade, the repository will be locked and no writes will be
3770 During the upgrade, the repository will be locked and no writes will be
3764 allowed.
3771 allowed.
3765
3772
3766 At the end of the upgrade, the repository may not be readable while new
3773 At the end of the upgrade, the repository may not be readable while new
3767 repository data is swapped in. This window will be as long as it takes to
3774 repository data is swapped in. This window will be as long as it takes to
3768 rename some directories inside the ``.hg`` directory. On most machines, this
3775 rename some directories inside the ``.hg`` directory. On most machines, this
3769 should complete almost instantaneously and the chances of a consumer being
3776 should complete almost instantaneously and the chances of a consumer being
3770 unable to access the repository should be low.
3777 unable to access the repository should be low.
3771
3778
3772 By default, all revlogs will be upgraded. You can restrict this using flags
3779 By default, all revlogs will be upgraded. You can restrict this using flags
3773 such as `--manifest`:
3780 such as `--manifest`:
3774
3781
3775 * `--manifest`: only optimize the manifest
3782 * `--manifest`: only optimize the manifest
3776 * `--no-manifest`: optimize all revlogs but the manifest
3783 * `--no-manifest`: optimize all revlogs but the manifest
3777 * `--changelog`: optimize the changelog only
3784 * `--changelog`: optimize the changelog only
3778 * `--no-changelog --no-manifest`: optimize filelogs only
3785 * `--no-changelog --no-manifest`: optimize filelogs only
3779 """
3786 """
3780 return upgrade.upgraderepo(
3787 return upgrade.upgraderepo(
3781 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3788 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3782 )
3789 )
3783
3790
3784
3791
3785 @command(
3792 @command(
3786 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3793 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3787 )
3794 )
3788 def debugwalk(ui, repo, *pats, **opts):
3795 def debugwalk(ui, repo, *pats, **opts):
3789 """show how files match on given patterns"""
3796 """show how files match on given patterns"""
3790 opts = pycompat.byteskwargs(opts)
3797 opts = pycompat.byteskwargs(opts)
3791 m = scmutil.match(repo[None], pats, opts)
3798 m = scmutil.match(repo[None], pats, opts)
3792 if ui.verbose:
3799 if ui.verbose:
3793 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3800 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3794 items = list(repo[None].walk(m))
3801 items = list(repo[None].walk(m))
3795 if not items:
3802 if not items:
3796 return
3803 return
3797 f = lambda fn: fn
3804 f = lambda fn: fn
3798 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3805 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3799 f = lambda fn: util.normpath(fn)
3806 f = lambda fn: util.normpath(fn)
3800 fmt = b'f %%-%ds %%-%ds %%s' % (
3807 fmt = b'f %%-%ds %%-%ds %%s' % (
3801 max([len(abs) for abs in items]),
3808 max([len(abs) for abs in items]),
3802 max([len(repo.pathto(abs)) for abs in items]),
3809 max([len(repo.pathto(abs)) for abs in items]),
3803 )
3810 )
3804 for abs in items:
3811 for abs in items:
3805 line = fmt % (
3812 line = fmt % (
3806 abs,
3813 abs,
3807 f(repo.pathto(abs)),
3814 f(repo.pathto(abs)),
3808 m.exact(abs) and b'exact' or b'',
3815 m.exact(abs) and b'exact' or b'',
3809 )
3816 )
3810 ui.write(b"%s\n" % line.rstrip())
3817 ui.write(b"%s\n" % line.rstrip())
3811
3818
3812
3819
3813 @command(b'debugwhyunstable', [], _(b'REV'))
3820 @command(b'debugwhyunstable', [], _(b'REV'))
3814 def debugwhyunstable(ui, repo, rev):
3821 def debugwhyunstable(ui, repo, rev):
3815 """explain instabilities of a changeset"""
3822 """explain instabilities of a changeset"""
3816 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3823 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3817 dnodes = b''
3824 dnodes = b''
3818 if entry.get(b'divergentnodes'):
3825 if entry.get(b'divergentnodes'):
3819 dnodes = (
3826 dnodes = (
3820 b' '.join(
3827 b' '.join(
3821 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3828 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3822 for ctx in entry[b'divergentnodes']
3829 for ctx in entry[b'divergentnodes']
3823 )
3830 )
3824 + b' '
3831 + b' '
3825 )
3832 )
3826 ui.write(
3833 ui.write(
3827 b'%s: %s%s %s\n'
3834 b'%s: %s%s %s\n'
3828 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3835 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3829 )
3836 )
3830
3837
3831
3838
3832 @command(
3839 @command(
3833 b'debugwireargs',
3840 b'debugwireargs',
3834 [
3841 [
3835 (b'', b'three', b'', b'three'),
3842 (b'', b'three', b'', b'three'),
3836 (b'', b'four', b'', b'four'),
3843 (b'', b'four', b'', b'four'),
3837 (b'', b'five', b'', b'five'),
3844 (b'', b'five', b'', b'five'),
3838 ]
3845 ]
3839 + cmdutil.remoteopts,
3846 + cmdutil.remoteopts,
3840 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3847 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3841 norepo=True,
3848 norepo=True,
3842 )
3849 )
3843 def debugwireargs(ui, repopath, *vals, **opts):
3850 def debugwireargs(ui, repopath, *vals, **opts):
3844 opts = pycompat.byteskwargs(opts)
3851 opts = pycompat.byteskwargs(opts)
3845 repo = hg.peer(ui, opts, repopath)
3852 repo = hg.peer(ui, opts, repopath)
3846 for opt in cmdutil.remoteopts:
3853 for opt in cmdutil.remoteopts:
3847 del opts[opt[1]]
3854 del opts[opt[1]]
3848 args = {}
3855 args = {}
3849 for k, v in pycompat.iteritems(opts):
3856 for k, v in pycompat.iteritems(opts):
3850 if v:
3857 if v:
3851 args[k] = v
3858 args[k] = v
3852 args = pycompat.strkwargs(args)
3859 args = pycompat.strkwargs(args)
3853 # run twice to check that we don't mess up the stream for the next command
3860 # run twice to check that we don't mess up the stream for the next command
3854 res1 = repo.debugwireargs(*vals, **args)
3861 res1 = repo.debugwireargs(*vals, **args)
3855 res2 = repo.debugwireargs(*vals, **args)
3862 res2 = repo.debugwireargs(*vals, **args)
3856 ui.write(b"%s\n" % res1)
3863 ui.write(b"%s\n" % res1)
3857 if res1 != res2:
3864 if res1 != res2:
3858 ui.warn(b"%s\n" % res2)
3865 ui.warn(b"%s\n" % res2)
3859
3866
3860
3867
3861 def _parsewirelangblocks(fh):
3868 def _parsewirelangblocks(fh):
3862 activeaction = None
3869 activeaction = None
3863 blocklines = []
3870 blocklines = []
3864 lastindent = 0
3871 lastindent = 0
3865
3872
3866 for line in fh:
3873 for line in fh:
3867 line = line.rstrip()
3874 line = line.rstrip()
3868 if not line:
3875 if not line:
3869 continue
3876 continue
3870
3877
3871 if line.startswith(b'#'):
3878 if line.startswith(b'#'):
3872 continue
3879 continue
3873
3880
3874 if not line.startswith(b' '):
3881 if not line.startswith(b' '):
3875 # New block. Flush previous one.
3882 # New block. Flush previous one.
3876 if activeaction:
3883 if activeaction:
3877 yield activeaction, blocklines
3884 yield activeaction, blocklines
3878
3885
3879 activeaction = line
3886 activeaction = line
3880 blocklines = []
3887 blocklines = []
3881 lastindent = 0
3888 lastindent = 0
3882 continue
3889 continue
3883
3890
3884 # Else we start with an indent.
3891 # Else we start with an indent.
3885
3892
3886 if not activeaction:
3893 if not activeaction:
3887 raise error.Abort(_(b'indented line outside of block'))
3894 raise error.Abort(_(b'indented line outside of block'))
3888
3895
3889 indent = len(line) - len(line.lstrip())
3896 indent = len(line) - len(line.lstrip())
3890
3897
3891 # If this line is indented more than the last line, concatenate it.
3898 # If this line is indented more than the last line, concatenate it.
3892 if indent > lastindent and blocklines:
3899 if indent > lastindent and blocklines:
3893 blocklines[-1] += line.lstrip()
3900 blocklines[-1] += line.lstrip()
3894 else:
3901 else:
3895 blocklines.append(line)
3902 blocklines.append(line)
3896 lastindent = indent
3903 lastindent = indent
3897
3904
3898 # Flush last block.
3905 # Flush last block.
3899 if activeaction:
3906 if activeaction:
3900 yield activeaction, blocklines
3907 yield activeaction, blocklines
3901
3908
3902
3909
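# Editorial sketch (not part of the changeset): how the parser above groups a
# wire-language script into (action, lines) pairs. It assumes a Mercurial
# installation is importable; _parsewirelangblocks is the private helper
# defined just above, so this is illustration only.
import io
from mercurial import debugcommands

script = io.BytesIO(
    b'# comments are skipped\n'
    b'command listkeys\n'
    b'    namespace bookmarks\n'
    b'batchbegin\n'
)

for action, lines in debugcommands._parsewirelangblocks(script):
    print(action, lines)

# Expected output:
#   b'command listkeys' [b'    namespace bookmarks']
#   b'batchbegin' []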
3903 @command(
3910 @command(
3904 b'debugwireproto',
3911 b'debugwireproto',
3905 [
3912 [
3906 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3913 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3907 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3914 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3908 (
3915 (
3909 b'',
3916 b'',
3910 b'noreadstderr',
3917 b'noreadstderr',
3911 False,
3918 False,
3912 _(b'do not read from stderr of the remote'),
3919 _(b'do not read from stderr of the remote'),
3913 ),
3920 ),
3914 (
3921 (
3915 b'',
3922 b'',
3916 b'nologhandshake',
3923 b'nologhandshake',
3917 False,
3924 False,
3918 _(b'do not log I/O related to the peer handshake'),
3925 _(b'do not log I/O related to the peer handshake'),
3919 ),
3926 ),
3920 ]
3927 ]
3921 + cmdutil.remoteopts,
3928 + cmdutil.remoteopts,
3922 _(b'[PATH]'),
3929 _(b'[PATH]'),
3923 optionalrepo=True,
3930 optionalrepo=True,
3924 )
3931 )
3925 def debugwireproto(ui, repo, path=None, **opts):
3932 def debugwireproto(ui, repo, path=None, **opts):
3926 """send wire protocol commands to a server
3933 """send wire protocol commands to a server
3927
3934
3928 This command can be used to issue wire protocol commands to remote
3935 This command can be used to issue wire protocol commands to remote
3929 peers and to debug the raw data being exchanged.
3936 peers and to debug the raw data being exchanged.
3930
3937
3931 ``--localssh`` will start an SSH server against the current repository
3938 ``--localssh`` will start an SSH server against the current repository
3932 and connect to that. By default, the connection will perform a handshake
3939 and connect to that. By default, the connection will perform a handshake
3933 and establish an appropriate peer instance.
3940 and establish an appropriate peer instance.
3934
3941
3935 ``--peer`` can be used to bypass the handshake protocol and construct a
3942 ``--peer`` can be used to bypass the handshake protocol and construct a
3936 peer instance using the specified class type. Valid values are ``raw``,
3943 peer instance using the specified class type. Valid values are ``raw``,
3937 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3944 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3938 raw data payloads and don't support higher-level command actions.
3945 raw data payloads and don't support higher-level command actions.
3939
3946
3940 ``--noreadstderr`` can be used to disable automatic reading from stderr
3947 ``--noreadstderr`` can be used to disable automatic reading from stderr
3941 of the peer (for SSH connections only). Disabling automatic reading of
3948 of the peer (for SSH connections only). Disabling automatic reading of
3942 stderr is useful for making output more deterministic.
3949 stderr is useful for making output more deterministic.
3943
3950
3944 Commands are issued via a mini language which is specified via stdin.
3951 Commands are issued via a mini language which is specified via stdin.
3945 The language consists of individual actions to perform. An action is
3952 The language consists of individual actions to perform. An action is
3946 defined by a block. A block is defined as a line with no leading
3953 defined by a block. A block is defined as a line with no leading
3947 space followed by 0 or more lines with leading space. Blocks are
3954 space followed by 0 or more lines with leading space. Blocks are
3948 effectively a high-level command with additional metadata.
3955 effectively a high-level command with additional metadata.
3949
3956
3950 Lines beginning with ``#`` are ignored.
3957 Lines beginning with ``#`` are ignored.
3951
3958
3952 The following sections denote available actions.
3959 The following sections denote available actions.
3953
3960
3954 raw
3961 raw
3955 ---
3962 ---
3956
3963
3957 Send raw data to the server.
3964 Send raw data to the server.
3958
3965
3959 The block payload contains the raw data to send as one atomic send
3966 The block payload contains the raw data to send as one atomic send
3960 operation. The data may not actually be delivered in a single system
3967 operation. The data may not actually be delivered in a single system
3961 call: it depends on the abilities of the transport being used.
3968 call: it depends on the abilities of the transport being used.
3962
3969
3963 Each line in the block is de-indented and concatenated. Then, that
3970 Each line in the block is de-indented and concatenated. Then, that
3964 value is evaluated as a Python b'' literal. This allows the use of
3971 value is evaluated as a Python b'' literal. This allows the use of
3965 backslash escaping, etc.
3972 backslash escaping, etc.
3966
3973
3967 raw+
3974 raw+
3968 ----
3975 ----
3969
3976
3970 Behaves like ``raw`` except flushes output afterwards.
3977 Behaves like ``raw`` except flushes output afterwards.
3971
3978
3972 command <X>
3979 command <X>
3973 -----------
3980 -----------
3974
3981
3975 Send a request to run a named command, whose name follows the ``command``
3982 Send a request to run a named command, whose name follows the ``command``
3976 string.
3983 string.
3977
3984
3978 Arguments to the command are defined as lines in this block. The format of
3985 Arguments to the command are defined as lines in this block. The format of
3979 each line is ``<key> <value>``. e.g.::
3986 each line is ``<key> <value>``. e.g.::
3980
3987
3981 command listkeys
3988 command listkeys
3982 namespace bookmarks
3989 namespace bookmarks
3983
3990
3984 If the value begins with ``eval:``, it will be interpreted as a Python
3991 If the value begins with ``eval:``, it will be interpreted as a Python
3985 literal expression. Otherwise values are interpreted as Python b'' literals.
3992 literal expression. Otherwise values are interpreted as Python b'' literals.
3986 This allows sending complex types and encoding special byte sequences via
3993 This allows sending complex types and encoding special byte sequences via
3987 backslash escaping.
3994 backslash escaping.
3988
3995
3989 The following arguments have special meaning:
3996 The following arguments have special meaning:
3990
3997
3991 ``PUSHFILE``
3998 ``PUSHFILE``
3992 When defined, the *push* mechanism of the peer will be used instead
3999 When defined, the *push* mechanism of the peer will be used instead
3993 of the static request-response mechanism and the content of the
4000 of the static request-response mechanism and the content of the
3994 file specified in the value of this argument will be sent as the
4001 file specified in the value of this argument will be sent as the
3995 command payload.
4002 command payload.
3996
4003
3997 This can be used to submit a local bundle file to the remote.
4004 This can be used to submit a local bundle file to the remote.
3998
4005
3999 batchbegin
4006 batchbegin
4000 ----------
4007 ----------
4001
4008
4002 Instruct the peer to begin a batched send.
4009 Instruct the peer to begin a batched send.
4003
4010
4004 All ``command`` blocks are queued for execution until the next
4011 All ``command`` blocks are queued for execution until the next
4005 ``batchsubmit`` block.
4012 ``batchsubmit`` block.
4006
4013
4007 batchsubmit
4014 batchsubmit
4008 -----------
4015 -----------
4009
4016
4010 Submit previously queued ``command`` blocks as a batch request.
4017 Submit previously queued ``command`` blocks as a batch request.
4011
4018
4012 This action MUST be paired with a ``batchbegin`` action.
4019 This action MUST be paired with a ``batchbegin`` action.
4013
4020
4014 httprequest <method> <path>
4021 httprequest <method> <path>
4015 ---------------------------
4022 ---------------------------
4016
4023
4017 (HTTP peer only)
4024 (HTTP peer only)
4018
4025
4019 Send an HTTP request to the peer.
4026 Send an HTTP request to the peer.
4020
4027
4021 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4028 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4022
4029
4023 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4030 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4024 headers to add to the request. e.g. ``Accept: foo``.
4031 headers to add to the request. e.g. ``Accept: foo``.
4025
4032
4026 The following arguments are special:
4033 The following arguments are special:
4027
4034
4028 ``BODYFILE``
4035 ``BODYFILE``
4029 The content of the file defined as the value to this argument will be
4036 The content of the file defined as the value to this argument will be
4030 transferred verbatim as the HTTP request body.
4037 transferred verbatim as the HTTP request body.
4031
4038
4032 ``frame <type> <flags> <payload>``
4039 ``frame <type> <flags> <payload>``
4033 Send a unified protocol frame as part of the request body.
4040 Send a unified protocol frame as part of the request body.
4034
4041
4035 All frames will be collected and sent as the body to the HTTP
4042 All frames will be collected and sent as the body to the HTTP
4036 request.
4043 request.
4037
4044
4038 close
4045 close
4039 -----
4046 -----
4040
4047
4041 Close the connection to the server.
4048 Close the connection to the server.
4042
4049
4043 flush
4050 flush
4044 -----
4051 -----
4045
4052
4046 Flush data written to the server.
4053 Flush data written to the server.
4047
4054
4048 readavailable
4055 readavailable
4049 -------------
4056 -------------
4050
4057
4051 Close the write end of the connection and read all available data from
4058 Close the write end of the connection and read all available data from
4052 the server.
4059 the server.
4053
4060
4054 If the connection to the server encompasses multiple pipes, we poll both
4061 If the connection to the server encompasses multiple pipes, we poll both
4055 pipes and read available data.
4062 pipes and read available data.
4056
4063
4057 readline
4064 readline
4058 --------
4065 --------
4059
4066
4060 Read a line of output from the server. If there are multiple output
4067 Read a line of output from the server. If there are multiple output
4061 pipes, reads only the main pipe.
4068 pipes, reads only the main pipe.
4062
4069
4063 ereadline
4070 ereadline
4064 ---------
4071 ---------
4065
4072
4066 Like ``readline``, but read from the stderr pipe, if available.
4073 Like ``readline``, but read from the stderr pipe, if available.
4067
4074
4068 read <X>
4075 read <X>
4069 --------
4076 --------
4070
4077
4071 ``read()`` ``<X>`` bytes from the server's main output pipe.
4078 ``read()`` ``<X>`` bytes from the server's main output pipe.
4072
4079
4073 eread <X>
4080 eread <X>
4074 ---------
4081 ---------
4075
4082
4076 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
4083 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
4077
4084
4078 Specifying Unified Frame-Based Protocol Frames
4085 Specifying Unified Frame-Based Protocol Frames
4079 ----------------------------------------------
4086 ----------------------------------------------
4080
4087
4081 It is possible to emit a *Unified Frame-Based Protocol* by using special
4088 It is possible to emit a *Unified Frame-Based Protocol* by using special
4082 syntax.
4089 syntax.
4083
4090
4084 A frame is composed as a type, flags, and payload. These can be parsed
4091 A frame is composed as a type, flags, and payload. These can be parsed
4085 from a string of the form:
4092 from a string of the form:
4086
4093
4087 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4094 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4088
4095
4089 ``request-id`` and ``stream-id`` are integers defining the request and
4096 ``request-id`` and ``stream-id`` are integers defining the request and
4090 stream identifiers.
4097 stream identifiers.
4091
4098
4092 ``type`` can be an integer value for the frame type or the string name
4099 ``type`` can be an integer value for the frame type or the string name
4093 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4100 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4094 ``command-name``.
4101 ``command-name``.
4095
4102
4096 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4103 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4097 components. Each component (and there can be just one) can be an integer
4104 components. Each component (and there can be just one) can be an integer
4098 or a flag name for stream flags or frame flags, respectively. Values are
4105 or a flag name for stream flags or frame flags, respectively. Values are
4099 resolved to integers and then bitwise OR'd together.
4106 resolved to integers and then bitwise OR'd together.
4100
4107
4101 ``payload`` represents the raw frame payload. If it begins with
4108 ``payload`` represents the raw frame payload. If it begins with
4102 ``cbor:``, the following string is evaluated as Python code and the
4109 ``cbor:``, the following string is evaluated as Python code and the
4103 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4110 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4104 as a Python byte string literal.
4111 as a Python byte string literal.
4105 """
4112 """
4106 opts = pycompat.byteskwargs(opts)
4113 opts = pycompat.byteskwargs(opts)
4107
4114
4108 if opts[b'localssh'] and not repo:
4115 if opts[b'localssh'] and not repo:
4109 raise error.Abort(_(b'--localssh requires a repository'))
4116 raise error.Abort(_(b'--localssh requires a repository'))
4110
4117
4111 if opts[b'peer'] and opts[b'peer'] not in (
4118 if opts[b'peer'] and opts[b'peer'] not in (
4112 b'raw',
4119 b'raw',
4113 b'http2',
4120 b'http2',
4114 b'ssh1',
4121 b'ssh1',
4115 b'ssh2',
4122 b'ssh2',
4116 ):
4123 ):
4117 raise error.Abort(
4124 raise error.Abort(
4118 _(b'invalid value for --peer'),
4125 _(b'invalid value for --peer'),
4119 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4126 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4120 )
4127 )
4121
4128
4122 if path and opts[b'localssh']:
4129 if path and opts[b'localssh']:
4123 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4130 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4124
4131
4125 if ui.interactive():
4132 if ui.interactive():
4126 ui.write(_(b'(waiting for commands on stdin)\n'))
4133 ui.write(_(b'(waiting for commands on stdin)\n'))
4127
4134
4128 blocks = list(_parsewirelangblocks(ui.fin))
4135 blocks = list(_parsewirelangblocks(ui.fin))
4129
4136
4130 proc = None
4137 proc = None
4131 stdin = None
4138 stdin = None
4132 stdout = None
4139 stdout = None
4133 stderr = None
4140 stderr = None
4134 opener = None
4141 opener = None
4135
4142
4136 if opts[b'localssh']:
4143 if opts[b'localssh']:
4137 # We start the SSH server in its own process so there is process
4144 # We start the SSH server in its own process so there is process
4138 # separation. This prevents a whole class of potential bugs around
4145 # separation. This prevents a whole class of potential bugs around
4139 # shared state from interfering with server operation.
4146 # shared state from interfering with server operation.
4140 args = procutil.hgcmd() + [
4147 args = procutil.hgcmd() + [
4141 b'-R',
4148 b'-R',
4142 repo.root,
4149 repo.root,
4143 b'debugserve',
4150 b'debugserve',
4144 b'--sshstdio',
4151 b'--sshstdio',
4145 ]
4152 ]
4146 proc = subprocess.Popen(
4153 proc = subprocess.Popen(
4147 pycompat.rapply(procutil.tonativestr, args),
4154 pycompat.rapply(procutil.tonativestr, args),
4148 stdin=subprocess.PIPE,
4155 stdin=subprocess.PIPE,
4149 stdout=subprocess.PIPE,
4156 stdout=subprocess.PIPE,
4150 stderr=subprocess.PIPE,
4157 stderr=subprocess.PIPE,
4151 bufsize=0,
4158 bufsize=0,
4152 )
4159 )
4153
4160
4154 stdin = proc.stdin
4161 stdin = proc.stdin
4155 stdout = proc.stdout
4162 stdout = proc.stdout
4156 stderr = proc.stderr
4163 stderr = proc.stderr
4157
4164
4158 # We turn the pipes into observers so we can log I/O.
4165 # We turn the pipes into observers so we can log I/O.
4159 if ui.verbose or opts[b'peer'] == b'raw':
4166 if ui.verbose or opts[b'peer'] == b'raw':
4160 stdin = util.makeloggingfileobject(
4167 stdin = util.makeloggingfileobject(
4161 ui, proc.stdin, b'i', logdata=True
4168 ui, proc.stdin, b'i', logdata=True
4162 )
4169 )
4163 stdout = util.makeloggingfileobject(
4170 stdout = util.makeloggingfileobject(
4164 ui, proc.stdout, b'o', logdata=True
4171 ui, proc.stdout, b'o', logdata=True
4165 )
4172 )
4166 stderr = util.makeloggingfileobject(
4173 stderr = util.makeloggingfileobject(
4167 ui, proc.stderr, b'e', logdata=True
4174 ui, proc.stderr, b'e', logdata=True
4168 )
4175 )
4169
4176
4170 # --localssh also implies the peer connection settings.
4177 # --localssh also implies the peer connection settings.
4171
4178
4172 url = b'ssh://localserver'
4179 url = b'ssh://localserver'
4173 autoreadstderr = not opts[b'noreadstderr']
4180 autoreadstderr = not opts[b'noreadstderr']
4174
4181
4175 if opts[b'peer'] == b'ssh1':
4182 if opts[b'peer'] == b'ssh1':
4176 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4183 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4177 peer = sshpeer.sshv1peer(
4184 peer = sshpeer.sshv1peer(
4178 ui,
4185 ui,
4179 url,
4186 url,
4180 proc,
4187 proc,
4181 stdin,
4188 stdin,
4182 stdout,
4189 stdout,
4183 stderr,
4190 stderr,
4184 None,
4191 None,
4185 autoreadstderr=autoreadstderr,
4192 autoreadstderr=autoreadstderr,
4186 )
4193 )
4187 elif opts[b'peer'] == b'ssh2':
4194 elif opts[b'peer'] == b'ssh2':
4188 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4195 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4189 peer = sshpeer.sshv2peer(
4196 peer = sshpeer.sshv2peer(
4190 ui,
4197 ui,
4191 url,
4198 url,
4192 proc,
4199 proc,
4193 stdin,
4200 stdin,
4194 stdout,
4201 stdout,
4195 stderr,
4202 stderr,
4196 None,
4203 None,
4197 autoreadstderr=autoreadstderr,
4204 autoreadstderr=autoreadstderr,
4198 )
4205 )
4199 elif opts[b'peer'] == b'raw':
4206 elif opts[b'peer'] == b'raw':
4200 ui.write(_(b'using raw connection to peer\n'))
4207 ui.write(_(b'using raw connection to peer\n'))
4201 peer = None
4208 peer = None
4202 else:
4209 else:
4203 ui.write(_(b'creating ssh peer from handshake results\n'))
4210 ui.write(_(b'creating ssh peer from handshake results\n'))
4204 peer = sshpeer.makepeer(
4211 peer = sshpeer.makepeer(
4205 ui,
4212 ui,
4206 url,
4213 url,
4207 proc,
4214 proc,
4208 stdin,
4215 stdin,
4209 stdout,
4216 stdout,
4210 stderr,
4217 stderr,
4211 autoreadstderr=autoreadstderr,
4218 autoreadstderr=autoreadstderr,
4212 )
4219 )
4213
4220
4214 elif path:
4221 elif path:
4215 # We bypass hg.peer() so we can proxy the sockets.
4222 # We bypass hg.peer() so we can proxy the sockets.
4216 # TODO consider not doing this because we skip
4223 # TODO consider not doing this because we skip
4217 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4224 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4218 u = util.url(path)
4225 u = util.url(path)
4219 if u.scheme != b'http':
4226 if u.scheme != b'http':
4220 raise error.Abort(_(b'only http:// paths are currently supported'))
4227 raise error.Abort(_(b'only http:// paths are currently supported'))
4221
4228
4222 url, authinfo = u.authinfo()
4229 url, authinfo = u.authinfo()
4223 openerargs = {
4230 openerargs = {
4224 'useragent': b'Mercurial debugwireproto',
4231 'useragent': b'Mercurial debugwireproto',
4225 }
4232 }
4226
4233
4227 # Turn pipes/sockets into observers so we can log I/O.
4234 # Turn pipes/sockets into observers so we can log I/O.
4228 if ui.verbose:
4235 if ui.verbose:
4229 openerargs.update(
4236 openerargs.update(
4230 {
4237 {
4231 'loggingfh': ui,
4238 'loggingfh': ui,
4232 'loggingname': b's',
4239 'loggingname': b's',
4233 'loggingopts': {'logdata': True, 'logdataapis': False,},
4240 'loggingopts': {'logdata': True, 'logdataapis': False,},
4234 }
4241 }
4235 )
4242 )
4236
4243
4237 if ui.debugflag:
4244 if ui.debugflag:
4238 openerargs['loggingopts']['logdataapis'] = True
4245 openerargs['loggingopts']['logdataapis'] = True
4239
4246
4240 # Don't send default headers when in raw mode. This allows us to
4247 # Don't send default headers when in raw mode. This allows us to
4241 # bypass most of the behavior of our URL handling code so we can
4248 # bypass most of the behavior of our URL handling code so we can
4242 # have near complete control over what's sent on the wire.
4249 # have near complete control over what's sent on the wire.
4243 if opts[b'peer'] == b'raw':
4250 if opts[b'peer'] == b'raw':
4244 openerargs['sendaccept'] = False
4251 openerargs['sendaccept'] = False
4245
4252
4246 opener = urlmod.opener(ui, authinfo, **openerargs)
4253 opener = urlmod.opener(ui, authinfo, **openerargs)
4247
4254
4248 if opts[b'peer'] == b'http2':
4255 if opts[b'peer'] == b'http2':
4249 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4256 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4250 # We go through makepeer() because we need an API descriptor for
4257 # We go through makepeer() because we need an API descriptor for
4251 # the peer instance to be useful.
4258 # the peer instance to be useful.
4252 with ui.configoverride(
4259 with ui.configoverride(
4253 {(b'experimental', b'httppeer.advertise-v2'): True}
4260 {(b'experimental', b'httppeer.advertise-v2'): True}
4254 ):
4261 ):
4255 if opts[b'nologhandshake']:
4262 if opts[b'nologhandshake']:
4256 ui.pushbuffer()
4263 ui.pushbuffer()
4257
4264
4258 peer = httppeer.makepeer(ui, path, opener=opener)
4265 peer = httppeer.makepeer(ui, path, opener=opener)
4259
4266
4260 if opts[b'nologhandshake']:
4267 if opts[b'nologhandshake']:
4261 ui.popbuffer()
4268 ui.popbuffer()
4262
4269
4263 if not isinstance(peer, httppeer.httpv2peer):
4270 if not isinstance(peer, httppeer.httpv2peer):
4264 raise error.Abort(
4271 raise error.Abort(
4265 _(
4272 _(
4266 b'could not instantiate HTTP peer for '
4273 b'could not instantiate HTTP peer for '
4267 b'wire protocol version 2'
4274 b'wire protocol version 2'
4268 ),
4275 ),
4269 hint=_(
4276 hint=_(
4270 b'the server may not have the feature '
4277 b'the server may not have the feature '
4271 b'enabled or is not allowing this '
4278 b'enabled or is not allowing this '
4272 b'client version'
4279 b'client version'
4273 ),
4280 ),
4274 )
4281 )
4275
4282
4276 elif opts[b'peer'] == b'raw':
4283 elif opts[b'peer'] == b'raw':
4277 ui.write(_(b'using raw connection to peer\n'))
4284 ui.write(_(b'using raw connection to peer\n'))
4278 peer = None
4285 peer = None
4279 elif opts[b'peer']:
4286 elif opts[b'peer']:
4280 raise error.Abort(
4287 raise error.Abort(
4281 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4288 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4282 )
4289 )
4283 else:
4290 else:
4284 peer = httppeer.makepeer(ui, path, opener=opener)
4291 peer = httppeer.makepeer(ui, path, opener=opener)
4285
4292
4286 # We /could/ populate stdin/stdout with sock.makefile()...
4293 # We /could/ populate stdin/stdout with sock.makefile()...
4287 else:
4294 else:
4288 raise error.Abort(_(b'unsupported connection configuration'))
4295 raise error.Abort(_(b'unsupported connection configuration'))
4289
4296
4290 batchedcommands = None
4297 batchedcommands = None
4291
4298
4292 # Now perform actions based on the parsed wire language instructions.
4299 # Now perform actions based on the parsed wire language instructions.
4293 for action, lines in blocks:
4300 for action, lines in blocks:
4294 if action in (b'raw', b'raw+'):
4301 if action in (b'raw', b'raw+'):
4295 if not stdin:
4302 if not stdin:
4296 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4303 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4297
4304
4298 # Concatenate the data together.
4305 # Concatenate the data together.
4299 data = b''.join(l.lstrip() for l in lines)
4306 data = b''.join(l.lstrip() for l in lines)
4300 data = stringutil.unescapestr(data)
4307 data = stringutil.unescapestr(data)
4301 stdin.write(data)
4308 stdin.write(data)
4302
4309
4303 if action == b'raw+':
4310 if action == b'raw+':
4304 stdin.flush()
4311 stdin.flush()
4305 elif action == b'flush':
4312 elif action == b'flush':
4306 if not stdin:
4313 if not stdin:
4307 raise error.Abort(_(b'cannot call flush on this peer'))
4314 raise error.Abort(_(b'cannot call flush on this peer'))
4308 stdin.flush()
4315 stdin.flush()
4309 elif action.startswith(b'command'):
4316 elif action.startswith(b'command'):
4310 if not peer:
4317 if not peer:
4311 raise error.Abort(
4318 raise error.Abort(
4312 _(
4319 _(
4313 b'cannot send commands unless peer instance '
4320 b'cannot send commands unless peer instance '
4314 b'is available'
4321 b'is available'
4315 )
4322 )
4316 )
4323 )
4317
4324
4318 command = action.split(b' ', 1)[1]
4325 command = action.split(b' ', 1)[1]
4319
4326
4320 args = {}
4327 args = {}
4321 for line in lines:
4328 for line in lines:
4322 # We need to allow empty values.
4329 # We need to allow empty values.
4323 fields = line.lstrip().split(b' ', 1)
4330 fields = line.lstrip().split(b' ', 1)
4324 if len(fields) == 1:
4331 if len(fields) == 1:
4325 key = fields[0]
4332 key = fields[0]
4326 value = b''
4333 value = b''
4327 else:
4334 else:
4328 key, value = fields
4335 key, value = fields
4329
4336
4330 if value.startswith(b'eval:'):
4337 if value.startswith(b'eval:'):
4331 value = stringutil.evalpythonliteral(value[5:])
4338 value = stringutil.evalpythonliteral(value[5:])
4332 else:
4339 else:
4333 value = stringutil.unescapestr(value)
4340 value = stringutil.unescapestr(value)
4334
4341
4335 args[key] = value
4342 args[key] = value
4336
4343
4337 if batchedcommands is not None:
4344 if batchedcommands is not None:
4338 batchedcommands.append((command, args))
4345 batchedcommands.append((command, args))
4339 continue
4346 continue
4340
4347
4341 ui.status(_(b'sending %s command\n') % command)
4348 ui.status(_(b'sending %s command\n') % command)
4342
4349
4343 if b'PUSHFILE' in args:
4350 if b'PUSHFILE' in args:
4344 with open(args[b'PUSHFILE'], 'rb') as fh:
4351 with open(args[b'PUSHFILE'], 'rb') as fh:
4345 del args[b'PUSHFILE']
4352 del args[b'PUSHFILE']
4346 res, output = peer._callpush(
4353 res, output = peer._callpush(
4347 command, fh, **pycompat.strkwargs(args)
4354 command, fh, **pycompat.strkwargs(args)
4348 )
4355 )
4349 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4356 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4350 ui.status(
4357 ui.status(
4351 _(b'remote output: %s\n') % stringutil.escapestr(output)
4358 _(b'remote output: %s\n') % stringutil.escapestr(output)
4352 )
4359 )
4353 else:
4360 else:
4354 with peer.commandexecutor() as e:
4361 with peer.commandexecutor() as e:
4355 res = e.callcommand(command, args).result()
4362 res = e.callcommand(command, args).result()
4356
4363
4357 if isinstance(res, wireprotov2peer.commandresponse):
4364 if isinstance(res, wireprotov2peer.commandresponse):
4358 val = res.objects()
4365 val = res.objects()
4359 ui.status(
4366 ui.status(
4360 _(b'response: %s\n')
4367 _(b'response: %s\n')
4361 % stringutil.pprint(val, bprefix=True, indent=2)
4368 % stringutil.pprint(val, bprefix=True, indent=2)
4362 )
4369 )
4363 else:
4370 else:
4364 ui.status(
4371 ui.status(
4365 _(b'response: %s\n')
4372 _(b'response: %s\n')
4366 % stringutil.pprint(res, bprefix=True, indent=2)
4373 % stringutil.pprint(res, bprefix=True, indent=2)
4367 )
4374 )
4368
4375
4369 elif action == b'batchbegin':
4376 elif action == b'batchbegin':
4370 if batchedcommands is not None:
4377 if batchedcommands is not None:
4371 raise error.Abort(_(b'nested batchbegin not allowed'))
4378 raise error.Abort(_(b'nested batchbegin not allowed'))
4372
4379
4373 batchedcommands = []
4380 batchedcommands = []
4374 elif action == b'batchsubmit':
4381 elif action == b'batchsubmit':
4375 # There is a batching API we could go through. But it would be
4382 # There is a batching API we could go through. But it would be
4376 # difficult to normalize requests into function calls. It is easier
4383 # difficult to normalize requests into function calls. It is easier
4377 # to bypass this layer and normalize to commands + args.
4384 # to bypass this layer and normalize to commands + args.
4378 ui.status(
4385 ui.status(
4379 _(b'sending batch with %d sub-commands\n')
4386 _(b'sending batch with %d sub-commands\n')
4380 % len(batchedcommands)
4387 % len(batchedcommands)
4381 )
4388 )
4382 assert peer is not None
4389 assert peer is not None
4383 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4390 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4384 ui.status(
4391 ui.status(
4385 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4392 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4386 )
4393 )
4387
4394
4388 batchedcommands = None
4395 batchedcommands = None
4389
4396
4390 elif action.startswith(b'httprequest '):
4397 elif action.startswith(b'httprequest '):
4391 if not opener:
4398 if not opener:
4392 raise error.Abort(
4399 raise error.Abort(
4393 _(b'cannot use httprequest without an HTTP peer')
4400 _(b'cannot use httprequest without an HTTP peer')
4394 )
4401 )
4395
4402
4396 request = action.split(b' ', 2)
4403 request = action.split(b' ', 2)
4397 if len(request) != 3:
4404 if len(request) != 3:
4398 raise error.Abort(
4405 raise error.Abort(
4399 _(
4406 _(
4400 b'invalid httprequest: expected format is '
4407 b'invalid httprequest: expected format is '
4401 b'"httprequest <method> <path>'
4408 b'"httprequest <method> <path>'
4402 )
4409 )
4403 )
4410 )
4404
4411
4405 method, httppath = request[1:]
4412 method, httppath = request[1:]
4406 headers = {}
4413 headers = {}
4407 body = None
4414 body = None
4408 frames = []
4415 frames = []
4409 for line in lines:
4416 for line in lines:
4410 line = line.lstrip()
4417 line = line.lstrip()
4411 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4418 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4412 if m:
4419 if m:
4413 # Headers need to use native strings.
4420 # Headers need to use native strings.
4414 key = pycompat.strurl(m.group(1))
4421 key = pycompat.strurl(m.group(1))
4415 value = pycompat.strurl(m.group(2))
4422 value = pycompat.strurl(m.group(2))
4416 headers[key] = value
4423 headers[key] = value
4417 continue
4424 continue
4418
4425
4419 if line.startswith(b'BODYFILE '):
4426 if line.startswith(b'BODYFILE '):
4420 with open(line.split(b' ', 1)[1], b'rb') as fh:
4427 with open(line.split(b' ', 1)[1], b'rb') as fh:
4421 body = fh.read()
4428 body = fh.read()
4422 elif line.startswith(b'frame '):
4429 elif line.startswith(b'frame '):
4423 frame = wireprotoframing.makeframefromhumanstring(
4430 frame = wireprotoframing.makeframefromhumanstring(
4424 line[len(b'frame ') :]
4431 line[len(b'frame ') :]
4425 )
4432 )
4426
4433
4427 frames.append(frame)
4434 frames.append(frame)
4428 else:
4435 else:
4429 raise error.Abort(
4436 raise error.Abort(
4430 _(b'unknown argument to httprequest: %s') % line
4437 _(b'unknown argument to httprequest: %s') % line
4431 )
4438 )
4432
4439
4433 url = path + httppath
4440 url = path + httppath
4434
4441
4435 if frames:
4442 if frames:
4436 body = b''.join(bytes(f) for f in frames)
4443 body = b''.join(bytes(f) for f in frames)
4437
4444
4438 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4445 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4439
4446
4440 # urllib.Request insists on using has_data() as a proxy for
4447 # urllib.Request insists on using has_data() as a proxy for
4441 # determining the request method. Override that to use our
4448 # determining the request method. Override that to use our
4442 # explicitly requested method.
4449 # explicitly requested method.
4443 req.get_method = lambda: pycompat.sysstr(method)
4450 req.get_method = lambda: pycompat.sysstr(method)
4444
4451
4445 try:
4452 try:
4446 res = opener.open(req)
4453 res = opener.open(req)
4447 body = res.read()
4454 body = res.read()
4448 except util.urlerr.urlerror as e:
4455 except util.urlerr.urlerror as e:
4449 # read() method must be called, but only exists in Python 2
4456 # read() method must be called, but only exists in Python 2
4450 getattr(e, 'read', lambda: None)()
4457 getattr(e, 'read', lambda: None)()
4451 continue
4458 continue
4452
4459
4453 ct = res.headers.get('Content-Type')
4460 ct = res.headers.get('Content-Type')
4454 if ct == 'application/mercurial-cbor':
4461 if ct == 'application/mercurial-cbor':
4455 ui.write(
4462 ui.write(
4456 _(b'cbor> %s\n')
4463 _(b'cbor> %s\n')
4457 % stringutil.pprint(
4464 % stringutil.pprint(
4458 cborutil.decodeall(body), bprefix=True, indent=2
4465 cborutil.decodeall(body), bprefix=True, indent=2
4459 )
4466 )
4460 )
4467 )
4461
4468
4462 elif action == b'close':
4469 elif action == b'close':
4463 assert peer is not None
4470 assert peer is not None
4464 peer.close()
4471 peer.close()
4465 elif action == b'readavailable':
4472 elif action == b'readavailable':
4466 if not stdout or not stderr:
4473 if not stdout or not stderr:
4467 raise error.Abort(
4474 raise error.Abort(
4468 _(b'readavailable not available on this peer')
4475 _(b'readavailable not available on this peer')
4469 )
4476 )
4470
4477
4471 stdin.close()
4478 stdin.close()
4472 stdout.read()
4479 stdout.read()
4473 stderr.read()
4480 stderr.read()
4474
4481
4475 elif action == b'readline':
4482 elif action == b'readline':
4476 if not stdout:
4483 if not stdout:
4477 raise error.Abort(_(b'readline not available on this peer'))
4484 raise error.Abort(_(b'readline not available on this peer'))
4478 stdout.readline()
4485 stdout.readline()
4479 elif action == b'ereadline':
4486 elif action == b'ereadline':
4480 if not stderr:
4487 if not stderr:
4481 raise error.Abort(_(b'ereadline not available on this peer'))
4488 raise error.Abort(_(b'ereadline not available on this peer'))
4482 stderr.readline()
4489 stderr.readline()
4483 elif action.startswith(b'read '):
4490 elif action.startswith(b'read '):
4484 count = int(action.split(b' ', 1)[1])
4491 count = int(action.split(b' ', 1)[1])
4485 if not stdout:
4492 if not stdout:
4486 raise error.Abort(_(b'read not available on this peer'))
4493 raise error.Abort(_(b'read not available on this peer'))
4487 stdout.read(count)
4494 stdout.read(count)
4488 elif action.startswith(b'eread '):
4495 elif action.startswith(b'eread '):
4489 count = int(action.split(b' ', 1)[1])
4496 count = int(action.split(b' ', 1)[1])
4490 if not stderr:
4497 if not stderr:
4491 raise error.Abort(_(b'eread not available on this peer'))
4498 raise error.Abort(_(b'eread not available on this peer'))
4492 stderr.read(count)
4499 stderr.read(count)
4493 else:
4500 else:
4494 raise error.Abort(_(b'unknown action: %s') % action)
4501 raise error.Abort(_(b'unknown action: %s') % action)
4495
4502
4496 if batchedcommands is not None:
4503 if batchedcommands is not None:
4497 raise error.Abort(_(b'unclosed "batchbegin" request'))
4504 raise error.Abort(_(b'unclosed "batchbegin" request'))
4498
4505
4499 if peer:
4506 if peer:
4500 peer.close()
4507 peer.close()
4501
4508
4502 if proc:
4509 if proc:
4503 proc.kill()
4510 proc.kill()
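An end-to-end sketch (editorial, not part of the changeset) of driving the
command above from Python: pipe a wire-language script into ``hg
debugwireproto --localssh``. It assumes ``hg`` is on PATH, that
``/path/to/repo`` is replaced with an existing repository, and Python 3.7+
for ``subprocess.run(capture_output=...)``.

    import subprocess

    # The script uses the mini language documented in the command's docstring.
    script = b'command listkeys\n    namespace bookmarks\n'

    res = subprocess.run(
        ['hg', '-R', '/path/to/repo', 'debugwireproto', '--localssh'],
        input=script,
        capture_output=True,
    )
    print(res.stdout.decode())
    print(res.stderr.decode())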
@@ -1,255 +1,261 b''
1 hg debuginstall
1 hg debuginstall
2 $ hg debuginstall
2 $ hg debuginstall
3 checking encoding (ascii)...
3 checking encoding (ascii)...
4 checking Python executable (*) (glob)
4 checking Python executable (*) (glob)
5 checking Python implementation (*) (glob)
5 checking Python implementation (*) (glob)
6 checking Python version (2.*) (glob) (no-py3 !)
6 checking Python version (2.*) (glob) (no-py3 !)
7 checking Python version (3.*) (glob) (py3 !)
7 checking Python version (3.*) (glob) (py3 !)
8 checking Python lib (.*[Ll]ib.*)... (re)
8 checking Python lib (.*[Ll]ib.*)... (re)
9 checking Python security support (*) (glob)
9 checking Python security support (*) (glob)
10 TLS 1.2 not supported by Python install; network connections lack modern security (?)
10 TLS 1.2 not supported by Python install; network connections lack modern security (?)
11 SNI not supported by Python install; may have connectivity issues with some servers (?)
11 SNI not supported by Python install; may have connectivity issues with some servers (?)
12 checking Rust extensions \((installed|missing)\) (re)
12 checking Rust extensions \((installed|missing)\) (re)
13 checking Mercurial version (*) (glob)
13 checking Mercurial version (*) (glob)
14 checking Mercurial custom build (*) (glob)
14 checking Mercurial custom build (*) (glob)
15 checking module policy (*) (glob)
15 checking module policy (*) (glob)
16 checking installed modules (*mercurial)... (glob)
16 checking installed modules (*mercurial)... (glob)
17 checking registered compression engines (*zlib*) (glob)
17 checking registered compression engines (*zlib*) (glob)
18 checking available compression engines (*zlib*) (glob)
18 checking available compression engines (*zlib*) (glob)
19 checking available compression engines for wire protocol (*zlib*) (glob)
19 checking available compression engines for wire protocol (*zlib*) (glob)
20 checking "re2" regexp engine \((available|missing)\) (re)
20 checking "re2" regexp engine \((available|missing)\) (re)
21 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
21 checking templates (*mercurial?templates)... (glob)
22 checking templates (*mercurial?templates)... (glob)
22 checking default template (*mercurial?templates?map-cmdline.default) (glob)
23 checking default template (*mercurial?templates?map-cmdline.default) (glob)
23 checking commit editor... (*) (glob)
24 checking commit editor... (*) (glob)
24 checking username (test)
25 checking username (test)
25 no problems detected
26 no problems detected
26
27
27 hg debuginstall JSON
28 hg debuginstall JSON
28 $ hg debuginstall -Tjson | sed 's|\\\\|\\|g'
29 $ hg debuginstall -Tjson | sed 's|\\\\|\\|g'
29 [
30 [
30 {
31 {
31 "compengines": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
32 "compengines": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
32 "compenginesavail": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
33 "compenginesavail": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
33 "compenginesserver": [*"zlib"*], (glob)
34 "compenginesserver": [*"zlib"*], (glob)
34 "defaulttemplate": "*mercurial?templates?map-cmdline.default", (glob)
35 "defaulttemplate": "*mercurial?templates?map-cmdline.default", (glob)
35 "defaulttemplateerror": null,
36 "defaulttemplateerror": null,
36 "defaulttemplatenotfound": "default",
37 "defaulttemplatenotfound": "default",
37 "editor": "*", (glob)
38 "editor": "*", (glob)
38 "editornotfound": false,
39 "editornotfound": false,
39 "encoding": "ascii",
40 "encoding": "ascii",
40 "encodingerror": null,
41 "encodingerror": null,
41 "extensionserror": null, (no-pure !)
42 "extensionserror": null, (no-pure !)
42 "hgmodulepolicy": "*", (glob)
43 "hgmodulepolicy": "*", (glob)
43 "hgmodules": "*mercurial", (glob)
44 "hgmodules": "*mercurial", (glob)
44 "hgver": "*", (glob)
45 "hgver": "*", (glob)
45 "hgverextra": "*", (glob)
46 "hgverextra": "*", (glob)
46 "problems": 0,
47 "problems": 0,
47 "pythonexe": "*", (glob)
48 "pythonexe": "*", (glob)
48 "pythonimplementation": "*", (glob)
49 "pythonimplementation": "*", (glob)
49 "pythonlib": "*", (glob)
50 "pythonlib": "*", (glob)
50 "pythonsecurity": [*], (glob)
51 "pythonsecurity": [*], (glob)
51 "pythonver": "*.*.*", (glob)
52 "pythonver": "*.*.*", (glob)
52 "re2": (true|false), (re)
53 "re2": (true|false), (re)
53 "templatedirs": "*mercurial?templates", (glob)
54 "templatedirs": "*mercurial?templates", (glob)
54 "username": "test",
55 "username": "test",
55 "usernameerror": null,
56 "usernameerror": null,
56 "vinotfound": false
57 "vinotfound": false
57 }
58 }
58 ]
59 ]
59
60
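The JSON form above lends itself to scripted checks. As an editorial sketch
(not part of the test suite), one might verify re2 detection like this,
assuming ``hg`` is on PATH; the ``re2`` and ``problems`` field names match the
output shown above:

    import json
    import subprocess

    out = subprocess.run(
        ['hg', 'debuginstall', '-Tjson'], capture_output=True
    ).stdout
    info = json.loads(out)[0]
    print('re2 available:', info['re2'])
    print('problems detected:', info['problems'])
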
60 hg debuginstall with no username
61 hg debuginstall with no username
61 $ HGUSER= hg debuginstall
62 $ HGUSER= hg debuginstall
62 checking encoding (ascii)...
63 checking encoding (ascii)...
63 checking Python executable (*) (glob)
64 checking Python executable (*) (glob)
64 checking Python implementation (*) (glob)
65 checking Python implementation (*) (glob)
65 checking Python version (2.*) (glob) (no-py3 !)
66 checking Python version (2.*) (glob) (no-py3 !)
66 checking Python version (3.*) (glob) (py3 !)
67 checking Python version (3.*) (glob) (py3 !)
67 checking Python lib (.*[Ll]ib.*)... (re)
68 checking Python lib (.*[Ll]ib.*)... (re)
68 checking Python security support (*) (glob)
69 checking Python security support (*) (glob)
69 TLS 1.2 not supported by Python install; network connections lack modern security (?)
70 TLS 1.2 not supported by Python install; network connections lack modern security (?)
70 SNI not supported by Python install; may have connectivity issues with some servers (?)
71 SNI not supported by Python install; may have connectivity issues with some servers (?)
71 checking Rust extensions \((installed|missing)\) (re)
72 checking Rust extensions \((installed|missing)\) (re)
72 checking Mercurial version (*) (glob)
73 checking Mercurial version (*) (glob)
73 checking Mercurial custom build (*) (glob)
74 checking Mercurial custom build (*) (glob)
74 checking module policy (*) (glob)
75 checking module policy (*) (glob)
75 checking installed modules (*mercurial)... (glob)
76 checking installed modules (*mercurial)... (glob)
76 checking registered compression engines (*zlib*) (glob)
77 checking registered compression engines (*zlib*) (glob)
77 checking available compression engines (*zlib*) (glob)
78 checking available compression engines (*zlib*) (glob)
78 checking available compression engines for wire protocol (*zlib*) (glob)
79 checking available compression engines for wire protocol (*zlib*) (glob)
79 checking "re2" regexp engine \((available|missing)\) (re)
80 checking "re2" regexp engine \((available|missing)\) (re)
81 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
80 checking templates (*mercurial?templates)... (glob)
82 checking templates (*mercurial?templates)... (glob)
81 checking default template (*mercurial?templates?map-cmdline.default) (glob)
83 checking default template (*mercurial?templates?map-cmdline.default) (glob)
82 checking commit editor... (*) (glob)
84 checking commit editor... (*) (glob)
83 checking username...
85 checking username...
84 no username supplied
86 no username supplied
85 (specify a username in your configuration file)
87 (specify a username in your configuration file)
86 1 problems detected, please check your install!
88 1 problems detected, please check your install!
87 [1]
89 [1]
88
90
89 hg debuginstall with invalid encoding
91 hg debuginstall with invalid encoding
90 $ HGENCODING=invalidenc hg debuginstall | grep encoding
92 $ HGENCODING=invalidenc hg debuginstall | grep encoding
91 checking encoding (invalidenc)...
93 checking encoding (invalidenc)...
92 unknown encoding: invalidenc
94 unknown encoding: invalidenc
93
95
94 exception message in JSON
96 exception message in JSON
95
97
96 $ HGENCODING=invalidenc HGUSER= hg debuginstall -Tjson | grep error
98 $ HGENCODING=invalidenc HGUSER= hg debuginstall -Tjson | grep error
97 "defaulttemplateerror": null,
99 "defaulttemplateerror": null,
98 "encodingerror": "unknown encoding: invalidenc",
100 "encodingerror": "unknown encoding: invalidenc",
99 "extensionserror": null, (no-pure !)
101 "extensionserror": null, (no-pure !)
100 "usernameerror": "no username supplied",
102 "usernameerror": "no username supplied",
101
103
102 path variables are expanded (~ is the same as $TESTTMP)
104 path variables are expanded (~ is the same as $TESTTMP)
103 $ mkdir tools
105 $ mkdir tools
104 $ touch tools/testeditor.exe
106 $ touch tools/testeditor.exe
105 #if execbit
107 #if execbit
106 $ chmod 755 tools/testeditor.exe
108 $ chmod 755 tools/testeditor.exe
107 #endif
109 #endif
108 $ HGEDITOR="~/tools/testeditor.exe" hg debuginstall
110 $ HGEDITOR="~/tools/testeditor.exe" hg debuginstall
109 checking encoding (ascii)...
111 checking encoding (ascii)...
110 checking Python executable (*) (glob)
112 checking Python executable (*) (glob)
111 checking Python implementation (*) (glob)
113 checking Python implementation (*) (glob)
112 checking Python version (2.*) (glob) (no-py3 !)
114 checking Python version (2.*) (glob) (no-py3 !)
113 checking Python version (3.*) (glob) (py3 !)
115 checking Python version (3.*) (glob) (py3 !)
114 checking Python lib (.*[Ll]ib.*)... (re)
116 checking Python lib (.*[Ll]ib.*)... (re)
115 checking Python security support (*) (glob)
117 checking Python security support (*) (glob)
116 TLS 1.2 not supported by Python install; network connections lack modern security (?)
118 TLS 1.2 not supported by Python install; network connections lack modern security (?)
117 SNI not supported by Python install; may have connectivity issues with some servers (?)
119 SNI not supported by Python install; may have connectivity issues with some servers (?)
118 checking Rust extensions \((installed|missing)\) (re)
120 checking Rust extensions \((installed|missing)\) (re)
119 checking Mercurial version (*) (glob)
121 checking Mercurial version (*) (glob)
120 checking Mercurial custom build (*) (glob)
122 checking Mercurial custom build (*) (glob)
121 checking module policy (*) (glob)
123 checking module policy (*) (glob)
122 checking installed modules (*mercurial)... (glob)
124 checking installed modules (*mercurial)... (glob)
123 checking registered compression engines (*zlib*) (glob)
125 checking registered compression engines (*zlib*) (glob)
124 checking available compression engines (*zlib*) (glob)
126 checking available compression engines (*zlib*) (glob)
125 checking available compression engines for wire protocol (*zlib*) (glob)
127 checking available compression engines for wire protocol (*zlib*) (glob)
126 checking "re2" regexp engine \((available|missing)\) (re)
128 checking "re2" regexp engine \((available|missing)\) (re)
129 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
127 checking templates (*mercurial?templates)... (glob)
130 checking templates (*mercurial?templates)... (glob)
128 checking default template (*mercurial?templates?map-cmdline.default) (glob)
131 checking default template (*mercurial?templates?map-cmdline.default) (glob)
129 checking commit editor... ($TESTTMP/tools/testeditor.exe)
132 checking commit editor... ($TESTTMP/tools/testeditor.exe)
130 checking username (test)
133 checking username (test)
131 no problems detected
134 no problems detected
132
135
133 print out the binary post-shlexsplit in the error message when commit editor is
136 print out the binary post-shlexsplit in the error message when commit editor is
134 not found (this is intentionally using backslashes to mimic a Windows use case).
137 not found (this is intentionally using backslashes to mimic a Windows use case).
135 $ HGEDITOR="c:\foo\bar\baz.exe -y -z" hg debuginstall
138 $ HGEDITOR="c:\foo\bar\baz.exe -y -z" hg debuginstall
136 checking encoding (ascii)...
139 checking encoding (ascii)...
137 checking Python executable (*) (glob)
140 checking Python executable (*) (glob)
138 checking Python implementation (*) (glob)
141 checking Python implementation (*) (glob)
139 checking Python version (2.*) (glob) (no-py3 !)
142 checking Python version (2.*) (glob) (no-py3 !)
140 checking Python version (3.*) (glob) (py3 !)
143 checking Python version (3.*) (glob) (py3 !)
141 checking Python lib (.*[Ll]ib.*)... (re)
144 checking Python lib (.*[Ll]ib.*)... (re)
142 checking Python security support (*) (glob)
145 checking Python security support (*) (glob)
143 TLS 1.2 not supported by Python install; network connections lack modern security (?)
146 TLS 1.2 not supported by Python install; network connections lack modern security (?)
144 SNI not supported by Python install; may have connectivity issues with some servers (?)
147 SNI not supported by Python install; may have connectivity issues with some servers (?)
145 checking Rust extensions \((installed|missing)\) (re)
148 checking Rust extensions \((installed|missing)\) (re)
146 checking Mercurial version (*) (glob)
149 checking Mercurial version (*) (glob)
147 checking Mercurial custom build (*) (glob)
150 checking Mercurial custom build (*) (glob)
148 checking module policy (*) (glob)
151 checking module policy (*) (glob)
149 checking installed modules (*mercurial)... (glob)
152 checking installed modules (*mercurial)... (glob)
150 checking registered compression engines (*zlib*) (glob)
153 checking registered compression engines (*zlib*) (glob)
151 checking available compression engines (*zlib*) (glob)
154 checking available compression engines (*zlib*) (glob)
152 checking available compression engines for wire protocol (*zlib*) (glob)
155 checking available compression engines for wire protocol (*zlib*) (glob)
153 checking "re2" regexp engine \((available|missing)\) (re)
156 checking "re2" regexp engine \((available|missing)\) (re)
157 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
154 checking templates (*mercurial?templates)... (glob)
158 checking templates (*mercurial?templates)... (glob)
155 checking default template (*mercurial?templates?map-cmdline.default) (glob)
159 checking default template (*mercurial?templates?map-cmdline.default) (glob)
156 checking commit editor... (c:\foo\bar\baz.exe) (windows !)
160 checking commit editor... (c:\foo\bar\baz.exe) (windows !)
157 Can't find editor 'c:\foo\bar\baz.exe' in PATH (windows !)
161 Can't find editor 'c:\foo\bar\baz.exe' in PATH (windows !)
158 checking commit editor... (c:foobarbaz.exe) (no-windows !)
162 checking commit editor... (c:foobarbaz.exe) (no-windows !)
159 Can't find editor 'c:foobarbaz.exe' in PATH (no-windows !)
163 Can't find editor 'c:foobarbaz.exe' in PATH (no-windows !)
160 (specify a commit editor in your configuration file)
164 (specify a commit editor in your configuration file)
161 checking username (test)
165 checking username (test)
162 1 problems detected, please check your install!
166 1 problems detected, please check your install!
163 [1]
167 [1]
164
168
165 debuginstall extension support
169 debuginstall extension support
166 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false | grep atchman
170 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false | grep atchman
167 fsmonitor checking for watchman binary... (false)
171 fsmonitor checking for watchman binary... (false)
168 watchman binary missing or broken: warning: Watchman unavailable: watchman exited with code 1
172 watchman binary missing or broken: warning: Watchman unavailable: watchman exited with code 1
169 Verify the json works too:
173 Verify the json works too:
170 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false -Tjson | grep atchman
174 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false -Tjson | grep atchman
171 "fsmonitor-watchman": "false",
175 "fsmonitor-watchman": "false",
172 "fsmonitor-watchman-error": "warning: Watchman unavailable: watchman exited with code 1",
176 "fsmonitor-watchman-error": "warning: Watchman unavailable: watchman exited with code 1",
173
177
174 Verify that Mercurial is installable with pip. Note that this MUST be
178 Verify that Mercurial is installable with pip. Note that this MUST be
175 the last test in this file, because we do some nasty things to the
179 the last test in this file, because we do some nasty things to the
176 shell environment in order to make the virtualenv work reliably.
180 shell environment in order to make the virtualenv work reliably.
177
181
178 On Python 3, we use the venv module, which is part of the standard library.
182 On Python 3, we use the venv module, which is part of the standard library.
179 But some Linux distros strip out this module's functionality involving pip,
183 But some Linux distros strip out this module's functionality involving pip,
180 so we have to look for the ensurepip module, which these distros strip out
184 so we have to look for the ensurepip module, which these distros strip out
181 completely.
185 completely.
182 On Python 2, we use the 3rd party virtualenv module, if available.
186 On Python 2, we use the 3rd party virtualenv module, if available.
183
187
184 $ cd $TESTTMP
188 $ cd $TESTTMP
185 $ unset PYTHONPATH
189 $ unset PYTHONPATH
186
190
187 #if py3 ensurepip
191 #if py3 ensurepip
188 $ "$PYTHON" -m venv installenv >> pip.log
192 $ "$PYTHON" -m venv installenv >> pip.log
189
193
190 Note: we use this weird path to run pip and hg to avoid platform differences,
194 Note: we use this weird path to run pip and hg to avoid platform differences,
191 since it's bin on most platforms but Scripts on Windows.
195 since it's bin on most platforms but Scripts on Windows.
192 $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
196 $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
193 Failed building wheel for mercurial (?)
197 Failed building wheel for mercurial (?)
194 $ ./installenv/*/hg debuginstall || cat pip.log
198 $ ./installenv/*/hg debuginstall || cat pip.log
195 checking encoding (ascii)...
199 checking encoding (ascii)...
196 checking Python executable (*) (glob)
200 checking Python executable (*) (glob)
197 checking Python implementation (*) (glob)
201 checking Python implementation (*) (glob)
198 checking Python version (3.*) (glob)
202 checking Python version (3.*) (glob)
199 checking Python lib (*)... (glob)
203 checking Python lib (*)... (glob)
200 checking Python security support (*) (glob)
204 checking Python security support (*) (glob)
201 checking Rust extensions \((installed|missing)\) (re)
205 checking Rust extensions \((installed|missing)\) (re)
202 checking Mercurial version (*) (glob)
206 checking Mercurial version (*) (glob)
203 checking Mercurial custom build (*) (glob)
207 checking Mercurial custom build (*) (glob)
204 checking module policy (*) (glob)
208 checking module policy (*) (glob)
205 checking installed modules (*/mercurial)... (glob)
209 checking installed modules (*/mercurial)... (glob)
206 checking registered compression engines (*) (glob)
210 checking registered compression engines (*) (glob)
207 checking available compression engines (*) (glob)
211 checking available compression engines (*) (glob)
208 checking available compression engines for wire protocol (*) (glob)
212 checking available compression engines for wire protocol (*) (glob)
209 checking "re2" regexp engine \((available|missing)\) (re)
213 checking "re2" regexp engine \((available|missing)\) (re)
214 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
210 checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
215 checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
211 checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
216 checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
212 checking commit editor... (*) (glob)
217 checking commit editor... (*) (glob)
213 checking username (test)
218 checking username (test)
214 no problems detected
219 no problems detected
215 #endif
220 #endif
216
221
217 222 #if no-py3 virtualenv
218 223
219 224 Note: --no-site-packages is deprecated, but some places have an
220 225 ancient virtualenv from their linux distro or similar and it's not yet
221 226 the default for them.
222 227
223 228 $ "$PYTHON" -m virtualenv --no-site-packages --never-download installenv >> pip.log
224 229 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
225 230 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
226 231
227 232 Note: we use this weird path to run pip and hg to avoid platform differences,
228 233 since it's bin on most platforms but Scripts on Windows.
229 234 $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
230 235 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
231 236 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
232 237 $ ./installenv/*/hg debuginstall || cat pip.log
233 238 checking encoding (ascii)...
234 239 checking Python executable (*) (glob)
235 240 checking Python implementation (*) (glob)
236 241 checking Python version (2.*) (glob)
237 242 checking Python lib (*)... (glob)
238 243 checking Python security support (*) (glob)
239 244 TLS 1.2 not supported by Python install; network connections lack modern security (?)
240 245 SNI not supported by Python install; may have connectivity issues with some servers (?)
241 246 checking Rust extensions \((installed|missing)\) (re)
242 247 checking Mercurial version (*) (glob)
243 248 checking Mercurial custom build (*) (glob)
244 249 checking module policy (*) (glob)
245 250 checking installed modules (*/mercurial)... (glob)
246 251 checking registered compression engines (*) (glob)
247 252 checking available compression engines (*) (glob)
248 253 checking available compression engines for wire protocol (*) (glob)
249 254 checking "re2" regexp engine \((available|missing)\) (re)
    255 checking "re2" regexp engine Rust bindings \((installed|missing)\) (re)
250 256 checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
251 257 checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
252 258 checking commit editor... (*) (glob)
253 259 checking username (test)
254 260 no problems detected
255 261 #endif
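The re2-related lines above are the point of this change: hg debuginstall already
reported whether the "re2" regexp engine is available, and the added line (new line
214/255 in each hunk) also reports whether the Rust bindings for re2 are installed.
As a rough, hypothetical sketch (not Mercurial's actual implementation) of what an
"available/missing" probe of an optional regexp engine boils down to:

    # Hypothetical probe: report whether the optional third-party re2
    # bindings can be imported, falling back to "missing" otherwise.
    def re2_status():
        try:
            import re2  # optional dependency; absent on many installs
        except ImportError:
            return 'missing'
        return 'available'

    print('checking "re2" regexp engine (%s)' % re2_status())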