##// END OF EJS Templates
ui: remove excessive strtolocal() from debuguigetpass...
Yuya Nishihara -
r46653:e614eeb7 stable
parent child Browse files
Show More
@@ -1,4582 +1,4580 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import glob
14 import glob
15 import operator
15 import operator
16 import os
16 import os
17 import platform
17 import platform
18 import random
18 import random
19 import re
19 import re
20 import socket
20 import socket
21 import ssl
21 import ssl
22 import stat
22 import stat
23 import string
23 import string
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullid,
32 nullid,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 revlog,
72 revlog,
73 revset,
73 revset,
74 revsetlang,
74 revsetlang,
75 scmutil,
75 scmutil,
76 setdiscovery,
76 setdiscovery,
77 simplemerge,
77 simplemerge,
78 sshpeer,
78 sshpeer,
79 sslutil,
79 sslutil,
80 streamclone,
80 streamclone,
81 tags as tagsmod,
81 tags as tagsmod,
82 templater,
82 templater,
83 treediscovery,
83 treediscovery,
84 upgrade,
84 upgrade,
85 url as urlmod,
85 url as urlmod,
86 util,
86 util,
87 vfs as vfsmod,
87 vfs as vfsmod,
88 wireprotoframing,
88 wireprotoframing,
89 wireprotoserver,
89 wireprotoserver,
90 wireprotov2peer,
90 wireprotov2peer,
91 )
91 )
92 from .utils import (
92 from .utils import (
93 cborutil,
93 cborutil,
94 compression,
94 compression,
95 dateutil,
95 dateutil,
96 procutil,
96 procutil,
97 stringutil,
97 stringutil,
98 )
98 )
99
99
100 from .revlogutils import (
100 from .revlogutils import (
101 deltas as deltautil,
101 deltas as deltautil,
102 nodemap,
102 nodemap,
103 sidedata,
103 sidedata,
104 )
104 )
105
105
# Convenience alias: release a sequence of lock objects acquired earlier.
release = lockmod.release

# Module-local command table; the @command decorators below register
# every debug* command into it.
command = registrar.command()
109
109
110
110
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two forms: "REV1 REV2" resolves against the current repository's
    # changelog; "INDEX REV1 REV2" opens an arbitrary revlog index file.
    if len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    elif len(args) == 3:
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    # Resolve both revisions to nodes and report their common ancestor.
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
130
130
131
131
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout Mercurial; the filename was previously
    # a native str, which on Python 3 raises TypeError when joined with the
    # bytes-valued vfs base path.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
147
147
148
148
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path, sniff/parse it, and replay it into the repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
155
155
156
156
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only;
    # the text is parsed a second time below to actually build commits)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the most recently committed node (-1 = none yet)
        atbranch = b'default'
        nodeids = []  # commit node of each DAG id, for backref resolution
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' event: create one commit; ps lists parent DAG ids
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way-merge "mf" between the two parents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's dedicated line so changes stay mergeable
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # single file rewritten wholesale at every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # brand-new file per rev; merges also carry over the
                    # other parent's nf* files so nothing is lost
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # map DAG backrefs to previously committed nodes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' event: record a local tag for node id
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' event: switch named branch for subsequent commits
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
332
332
333
333
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of a changegroup unbundler 'gen'.

    With 'all' set, every delta chunk of every section (changelog,
    manifest, filelogs) is printed; otherwise only the changelog node
    ids are listed.  'indent' prefixes each line (used when nested
    inside a bundle2 listing).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # consume and print the delta chunks of the current section
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # sections must be consumed in stream order: changelog, manifest,
        # then one header per filelog until the empty terminator
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
373
373
374
374
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # unknown encoding version: report it instead of aborting so the
        # rest of the bundle can still be inspected
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # reuse the debugobsolete formatter config for marker rendering
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
397
397
398
398
def _debugphaseheads(ui, data, indent=0):
    """display phase heads contained in 'data'

    'data' is a binary phase-heads payload (e.g. a bundle2 'phase-heads'
    part); one "<node hex> <phase name>" line is written per head.
    (The previous docstring was copy-pasted from the obsmarkers helper
    and wrongly claimed this displayed version/markers.)
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
407
407
408
408
def _quasirepr(thing):
    """Render 'thing' repr-style, with dicts in deterministic key order."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        entries = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(entries)
    return pycompat.bytestr(repr(thing))
415
415
416
416
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only show parts whose type was requested via
    # --part-type (may be given multiple times)
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known part payloads get a nested, indented dump unless --quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
439
439
440
440
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: just print the bundlespec string and stop
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        # dispatch on the detected bundle format
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
463
463
464
464
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # plain wire-protocol capabilities first
    ui.writenoi18n(b'Main capabilities:\n')
    for name in sorted(peer.capabilities()):
        ui.write(b' %s\n' % name)
    # then the bundle2 capability tree, if the peer advertises one
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for value in values:
                ui.write(b' %s\n' % value)
481
481
482
482
@command(b'debugchangedfiles', [], b'REV')
def debugchangedfiles(ui, repo, rev):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # changed-files information is stored in changelog sidedata
    sd = repo.changelog.sidedata(ctx.rev())
    files_block = sd.get(sidedata.SD_FILES)
    if files_block is not None:
        files = metadata.decode_files_sidedata(sd)
        for f in sorted(files.touched):
            # classify each touched file; first matching category wins
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # copy information, if any: which parent it was copied from
            # and the source path
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
515
515
516
516
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # check every dirstate entry against the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    # reverse direction: every manifest1 file must be tracked as n/r/m
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            # fix: this warning previously lacked the trailing newline that
            # every other message in this function has, garbling output
            ui.warn(
                _(b"%s in manifest1, but listed as state %s\n") % (f, state)
            )
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
545
545
546
546
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; otherwise list raw colors/effects
    if not opts.get('style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
559
559
560
560
def _debugdisplaycolor(ui):
    """Print every available color/effect name, each rendered in itself."""
    # work on a copy so the real ui's style table is left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose user-configured color.*/terminfo.* keys
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
577
577
578
578
def _debugdisplaystyle(ui):
    """Print each configured style label and its effects, colorized."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # column width for aligning the effect lists
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            padding = max(0, width - len(label))
            ui.write(b' ' * padding)
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
592
592
593
593
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
615
615
616
616
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog file: emit its DAG, labeling the requested revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no file: walk the changelog, optionally annotating tags/branches
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
686
686
687
687
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # with -c/-m/--dir the first positional is actually the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
703
703
704
704
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # NOTE: `range` shadows the builtin, but it is part of the command's
    # CLI-visible parameter interface and must keep its name.
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
723
723
724
724
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
    (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
    base of delta chain to end of this revision; a measurement
    of how much extra data we need to read/seek across to read
    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
    (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # classify how `rev`'s delta was computed and measure its chain
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # chains are numbered in the order their bases are first seen
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
905
905
906
906
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        # legacy --nodates flag overrides --dates
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit set in the recorded mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
950
950
951
951
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            # old-style (treediscovery) protocol
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            # new-style (setdiscovery) protocol
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # second return value (whether anything is incoming) is unused
            common, anyinc, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
    ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )
1056
1056
1057
1057
1058 _chunksize = 4 << 10
1058 _chunksize = 4 << 10
1059
1059
1060
1060
@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # stream the resource in fixed-size chunks to bound memory use
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the response handle too; the original leaked it
        fh.close()
        if output:
            dest.close()
1080
1080
1081
1081
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) build: extensions live in the executable
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1143
1143
1144
1144
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Parses ``expr`` through the parse/analyze/optimize pipeline
    (optionally dumping the tree after selected stages), then matches
    the resulting matcher against candidate file names and prints the
    ones that match.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # pipeline stages, in execution order; names are valid --show-stage
    # arguments
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                # suppress the header for the legacy --verbose output
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1240
1240
1241
1241
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: widest variant name, but at least wide
    # enough for the b'format-variant' header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad so that every variant name column lines up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes values are shown verbatim; booleans become yes/no.
            # Attribute names are sys-strings, consistent with the
            # safehasattr(..., '__file__') calls elsewhere in this module.
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json/template) get the raw value
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # color the name/repo columns according to whether the repo
        # matches the config and the Mercurial default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1312
1312
1313
1313
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result the way this command reports it
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway file; the probe can fail
    # (e.g. unwritable directory), in which case we report b'(unknown)'
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1336
1336
1337
1337
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # assemble the keyword arguments for the wire-protocol call
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing --type value onto an on-disk bundle type
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1384
1384
1385
1385
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            # name of the matched ignored path (the file itself or an
            # enclosing directory) and the (file, line, pattern) data
            # of the matching rule
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # the file itself is not ignored; check whether any
                    # containing directory is
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1434
1434
1435
1435
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # render the first node id (if any) to size the header columns
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1475
1475
1476
1476
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # one edge per real parent, pointing parent -> child
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            # the null id marks "no second parent"; skip that edge
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1495
1495
1496
1496
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the index to be loaded/parsed before inspecting it
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    # attribute names are sys-strings (consistent with the
    # safehasattr(..., '__file__') calls elsewhere in this module); the
    # stats() accessor only exists on the native index implementation
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1506
1506
1507
1507
1508 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1508 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1509 def debuginstall(ui, **opts):
1509 def debuginstall(ui, **opts):
1510 '''test Mercurial installation
1510 '''test Mercurial installation
1511
1511
1512 Returns 0 on success.
1512 Returns 0 on success.
1513 '''
1513 '''
1514 opts = pycompat.byteskwargs(opts)
1514 opts = pycompat.byteskwargs(opts)
1515
1515
1516 problems = 0
1516 problems = 0
1517
1517
1518 fm = ui.formatter(b'debuginstall', opts)
1518 fm = ui.formatter(b'debuginstall', opts)
1519 fm.startitem()
1519 fm.startitem()
1520
1520
1521 # encoding might be unknown or wrong. don't translate these messages.
1521 # encoding might be unknown or wrong. don't translate these messages.
1522 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1522 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1523 err = None
1523 err = None
1524 try:
1524 try:
1525 codecs.lookup(pycompat.sysstr(encoding.encoding))
1525 codecs.lookup(pycompat.sysstr(encoding.encoding))
1526 except LookupError as inst:
1526 except LookupError as inst:
1527 err = stringutil.forcebytestr(inst)
1527 err = stringutil.forcebytestr(inst)
1528 problems += 1
1528 problems += 1
1529 fm.condwrite(
1529 fm.condwrite(
1530 err,
1530 err,
1531 b'encodingerror',
1531 b'encodingerror',
1532 b" %s\n (check that your locale is properly set)\n",
1532 b" %s\n (check that your locale is properly set)\n",
1533 err,
1533 err,
1534 )
1534 )
1535
1535
1536 # Python
1536 # Python
1537 pythonlib = None
1537 pythonlib = None
1538 if util.safehasattr(os, '__file__'):
1538 if util.safehasattr(os, '__file__'):
1539 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1539 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1540 elif getattr(sys, 'oxidized', False):
1540 elif getattr(sys, 'oxidized', False):
1541 pythonlib = pycompat.sysexecutable
1541 pythonlib = pycompat.sysexecutable
1542
1542
1543 fm.write(
1543 fm.write(
1544 b'pythonexe',
1544 b'pythonexe',
1545 _(b"checking Python executable (%s)\n"),
1545 _(b"checking Python executable (%s)\n"),
1546 pycompat.sysexecutable or _(b"unknown"),
1546 pycompat.sysexecutable or _(b"unknown"),
1547 )
1547 )
1548 fm.write(
1548 fm.write(
1549 b'pythonimplementation',
1549 b'pythonimplementation',
1550 _(b"checking Python implementation (%s)\n"),
1550 _(b"checking Python implementation (%s)\n"),
1551 pycompat.sysbytes(platform.python_implementation()),
1551 pycompat.sysbytes(platform.python_implementation()),
1552 )
1552 )
1553 fm.write(
1553 fm.write(
1554 b'pythonver',
1554 b'pythonver',
1555 _(b"checking Python version (%s)\n"),
1555 _(b"checking Python version (%s)\n"),
1556 (b"%d.%d.%d" % sys.version_info[:3]),
1556 (b"%d.%d.%d" % sys.version_info[:3]),
1557 )
1557 )
1558 fm.write(
1558 fm.write(
1559 b'pythonlib',
1559 b'pythonlib',
1560 _(b"checking Python lib (%s)...\n"),
1560 _(b"checking Python lib (%s)...\n"),
1561 pythonlib or _(b"unknown"),
1561 pythonlib or _(b"unknown"),
1562 )
1562 )
1563
1563
1564 try:
1564 try:
1565 from . import rustext
1565 from . import rustext
1566
1566
1567 rustext.__doc__ # trigger lazy import
1567 rustext.__doc__ # trigger lazy import
1568 except ImportError:
1568 except ImportError:
1569 rustext = None
1569 rustext = None
1570
1570
1571 security = set(sslutil.supportedprotocols)
1571 security = set(sslutil.supportedprotocols)
1572 if sslutil.hassni:
1572 if sslutil.hassni:
1573 security.add(b'sni')
1573 security.add(b'sni')
1574
1574
1575 fm.write(
1575 fm.write(
1576 b'pythonsecurity',
1576 b'pythonsecurity',
1577 _(b"checking Python security support (%s)\n"),
1577 _(b"checking Python security support (%s)\n"),
1578 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1578 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1579 )
1579 )
1580
1580
1581 # These are warnings, not errors. So don't increment problem count. This
1581 # These are warnings, not errors. So don't increment problem count. This
1582 # may change in the future.
1582 # may change in the future.
1583 if b'tls1.2' not in security:
1583 if b'tls1.2' not in security:
1584 fm.plain(
1584 fm.plain(
1585 _(
1585 _(
1586 b' TLS 1.2 not supported by Python install; '
1586 b' TLS 1.2 not supported by Python install; '
1587 b'network connections lack modern security\n'
1587 b'network connections lack modern security\n'
1588 )
1588 )
1589 )
1589 )
1590 if b'sni' not in security:
1590 if b'sni' not in security:
1591 fm.plain(
1591 fm.plain(
1592 _(
1592 _(
1593 b' SNI not supported by Python install; may have '
1593 b' SNI not supported by Python install; may have '
1594 b'connectivity issues with some servers\n'
1594 b'connectivity issues with some servers\n'
1595 )
1595 )
1596 )
1596 )
1597
1597
1598 fm.plain(
1598 fm.plain(
1599 _(
1599 _(
1600 b"checking Rust extensions (%s)\n"
1600 b"checking Rust extensions (%s)\n"
1601 % (b'missing' if rustext is None else b'installed')
1601 % (b'missing' if rustext is None else b'installed')
1602 ),
1602 ),
1603 )
1603 )
1604
1604
1605 # TODO print CA cert info
1605 # TODO print CA cert info
1606
1606
1607 # hg version
1607 # hg version
1608 hgver = util.version()
1608 hgver = util.version()
1609 fm.write(
1609 fm.write(
1610 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1610 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1611 )
1611 )
1612 fm.write(
1612 fm.write(
1613 b'hgverextra',
1613 b'hgverextra',
1614 _(b"checking Mercurial custom build (%s)\n"),
1614 _(b"checking Mercurial custom build (%s)\n"),
1615 b'+'.join(hgver.split(b'+')[1:]),
1615 b'+'.join(hgver.split(b'+')[1:]),
1616 )
1616 )
1617
1617
1618 # compiled modules
1618 # compiled modules
1619 hgmodules = None
1619 hgmodules = None
1620 if util.safehasattr(sys.modules[__name__], '__file__'):
1620 if util.safehasattr(sys.modules[__name__], '__file__'):
1621 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1621 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1622 elif getattr(sys, 'oxidized', False):
1622 elif getattr(sys, 'oxidized', False):
1623 hgmodules = pycompat.sysexecutable
1623 hgmodules = pycompat.sysexecutable
1624
1624
1625 fm.write(
1625 fm.write(
1626 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1626 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1627 )
1627 )
1628 fm.write(
1628 fm.write(
1629 b'hgmodules',
1629 b'hgmodules',
1630 _(b"checking installed modules (%s)...\n"),
1630 _(b"checking installed modules (%s)...\n"),
1631 hgmodules or _(b"unknown"),
1631 hgmodules or _(b"unknown"),
1632 )
1632 )
1633
1633
1634 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1634 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1635 rustext = rustandc # for now, that's the only case
1635 rustext = rustandc # for now, that's the only case
1636 cext = policy.policy in (b'c', b'allow') or rustandc
1636 cext = policy.policy in (b'c', b'allow') or rustandc
1637 nopure = cext or rustext
1637 nopure = cext or rustext
1638 if nopure:
1638 if nopure:
1639 err = None
1639 err = None
1640 try:
1640 try:
1641 if cext:
1641 if cext:
1642 from .cext import ( # pytype: disable=import-error
1642 from .cext import ( # pytype: disable=import-error
1643 base85,
1643 base85,
1644 bdiff,
1644 bdiff,
1645 mpatch,
1645 mpatch,
1646 osutil,
1646 osutil,
1647 )
1647 )
1648
1648
1649 # quiet pyflakes
1649 # quiet pyflakes
1650 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1650 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1651 if rustext:
1651 if rustext:
1652 from .rustext import ( # pytype: disable=import-error
1652 from .rustext import ( # pytype: disable=import-error
1653 ancestor,
1653 ancestor,
1654 dirstate,
1654 dirstate,
1655 )
1655 )
1656
1656
1657 dir(ancestor), dir(dirstate) # quiet pyflakes
1657 dir(ancestor), dir(dirstate) # quiet pyflakes
1658 except Exception as inst:
1658 except Exception as inst:
1659 err = stringutil.forcebytestr(inst)
1659 err = stringutil.forcebytestr(inst)
1660 problems += 1
1660 problems += 1
1661 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1661 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1662
1662
1663 compengines = util.compengines._engines.values()
1663 compengines = util.compengines._engines.values()
1664 fm.write(
1664 fm.write(
1665 b'compengines',
1665 b'compengines',
1666 _(b'checking registered compression engines (%s)\n'),
1666 _(b'checking registered compression engines (%s)\n'),
1667 fm.formatlist(
1667 fm.formatlist(
1668 sorted(e.name() for e in compengines),
1668 sorted(e.name() for e in compengines),
1669 name=b'compengine',
1669 name=b'compengine',
1670 fmt=b'%s',
1670 fmt=b'%s',
1671 sep=b', ',
1671 sep=b', ',
1672 ),
1672 ),
1673 )
1673 )
1674 fm.write(
1674 fm.write(
1675 b'compenginesavail',
1675 b'compenginesavail',
1676 _(b'checking available compression engines (%s)\n'),
1676 _(b'checking available compression engines (%s)\n'),
1677 fm.formatlist(
1677 fm.formatlist(
1678 sorted(e.name() for e in compengines if e.available()),
1678 sorted(e.name() for e in compengines if e.available()),
1679 name=b'compengine',
1679 name=b'compengine',
1680 fmt=b'%s',
1680 fmt=b'%s',
1681 sep=b', ',
1681 sep=b', ',
1682 ),
1682 ),
1683 )
1683 )
1684 wirecompengines = compression.compengines.supportedwireengines(
1684 wirecompengines = compression.compengines.supportedwireengines(
1685 compression.SERVERROLE
1685 compression.SERVERROLE
1686 )
1686 )
1687 fm.write(
1687 fm.write(
1688 b'compenginesserver',
1688 b'compenginesserver',
1689 _(
1689 _(
1690 b'checking available compression engines '
1690 b'checking available compression engines '
1691 b'for wire protocol (%s)\n'
1691 b'for wire protocol (%s)\n'
1692 ),
1692 ),
1693 fm.formatlist(
1693 fm.formatlist(
1694 [e.name() for e in wirecompengines if e.wireprotosupport()],
1694 [e.name() for e in wirecompengines if e.wireprotosupport()],
1695 name=b'compengine',
1695 name=b'compengine',
1696 fmt=b'%s',
1696 fmt=b'%s',
1697 sep=b', ',
1697 sep=b', ',
1698 ),
1698 ),
1699 )
1699 )
1700 re2 = b'missing'
1700 re2 = b'missing'
1701 if util._re2:
1701 if util._re2:
1702 re2 = b'available'
1702 re2 = b'available'
1703 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1703 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1704 fm.data(re2=bool(util._re2))
1704 fm.data(re2=bool(util._re2))
1705
1705
1706 # templates
1706 # templates
1707 p = templater.templatedir()
1707 p = templater.templatedir()
1708 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1708 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1709 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1709 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1710 if p:
1710 if p:
1711 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1711 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1712 if m:
1712 if m:
1713 # template found, check if it is working
1713 # template found, check if it is working
1714 err = None
1714 err = None
1715 try:
1715 try:
1716 templater.templater.frommapfile(m)
1716 templater.templater.frommapfile(m)
1717 except Exception as inst:
1717 except Exception as inst:
1718 err = stringutil.forcebytestr(inst)
1718 err = stringutil.forcebytestr(inst)
1719 p = None
1719 p = None
1720 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1720 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1721 else:
1721 else:
1722 p = None
1722 p = None
1723 fm.condwrite(
1723 fm.condwrite(
1724 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1724 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1725 )
1725 )
1726 fm.condwrite(
1726 fm.condwrite(
1727 not m,
1727 not m,
1728 b'defaulttemplatenotfound',
1728 b'defaulttemplatenotfound',
1729 _(b" template '%s' not found\n"),
1729 _(b" template '%s' not found\n"),
1730 b"default",
1730 b"default",
1731 )
1731 )
1732 if not p:
1732 if not p:
1733 problems += 1
1733 problems += 1
1734 fm.condwrite(
1734 fm.condwrite(
1735 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1735 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1736 )
1736 )
1737
1737
1738 # editor
1738 # editor
1739 editor = ui.geteditor()
1739 editor = ui.geteditor()
1740 editor = util.expandpath(editor)
1740 editor = util.expandpath(editor)
1741 editorbin = procutil.shellsplit(editor)[0]
1741 editorbin = procutil.shellsplit(editor)[0]
1742 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1742 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1743 cmdpath = procutil.findexe(editorbin)
1743 cmdpath = procutil.findexe(editorbin)
1744 fm.condwrite(
1744 fm.condwrite(
1745 not cmdpath and editor == b'vi',
1745 not cmdpath and editor == b'vi',
1746 b'vinotfound',
1746 b'vinotfound',
1747 _(
1747 _(
1748 b" No commit editor set and can't find %s in PATH\n"
1748 b" No commit editor set and can't find %s in PATH\n"
1749 b" (specify a commit editor in your configuration"
1749 b" (specify a commit editor in your configuration"
1750 b" file)\n"
1750 b" file)\n"
1751 ),
1751 ),
1752 not cmdpath and editor == b'vi' and editorbin,
1752 not cmdpath and editor == b'vi' and editorbin,
1753 )
1753 )
1754 fm.condwrite(
1754 fm.condwrite(
1755 not cmdpath and editor != b'vi',
1755 not cmdpath and editor != b'vi',
1756 b'editornotfound',
1756 b'editornotfound',
1757 _(
1757 _(
1758 b" Can't find editor '%s' in PATH\n"
1758 b" Can't find editor '%s' in PATH\n"
1759 b" (specify a commit editor in your configuration"
1759 b" (specify a commit editor in your configuration"
1760 b" file)\n"
1760 b" file)\n"
1761 ),
1761 ),
1762 not cmdpath and editorbin,
1762 not cmdpath and editorbin,
1763 )
1763 )
1764 if not cmdpath and editor != b'vi':
1764 if not cmdpath and editor != b'vi':
1765 problems += 1
1765 problems += 1
1766
1766
1767 # check username
1767 # check username
1768 username = None
1768 username = None
1769 err = None
1769 err = None
1770 try:
1770 try:
1771 username = ui.username()
1771 username = ui.username()
1772 except error.Abort as e:
1772 except error.Abort as e:
1773 err = e.message
1773 err = e.message
1774 problems += 1
1774 problems += 1
1775
1775
1776 fm.condwrite(
1776 fm.condwrite(
1777 username, b'username', _(b"checking username (%s)\n"), username
1777 username, b'username', _(b"checking username (%s)\n"), username
1778 )
1778 )
1779 fm.condwrite(
1779 fm.condwrite(
1780 err,
1780 err,
1781 b'usernameerror',
1781 b'usernameerror',
1782 _(
1782 _(
1783 b"checking username...\n %s\n"
1783 b"checking username...\n %s\n"
1784 b" (specify a username in your configuration file)\n"
1784 b" (specify a username in your configuration file)\n"
1785 ),
1785 ),
1786 err,
1786 err,
1787 )
1787 )
1788
1788
1789 for name, mod in extensions.extensions():
1789 for name, mod in extensions.extensions():
1790 handler = getattr(mod, 'debuginstall', None)
1790 handler = getattr(mod, 'debuginstall', None)
1791 if handler is not None:
1791 if handler is not None:
1792 problems += handler(ui, fm)
1792 problems += handler(ui, fm)
1793
1793
1794 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1794 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1795 if not problems:
1795 if not problems:
1796 fm.data(problems=problems)
1796 fm.data(problems=problems)
1797 fm.condwrite(
1797 fm.condwrite(
1798 problems,
1798 problems,
1799 b'problems',
1799 b'problems',
1800 _(b"%d problems detected, please check your install!\n"),
1800 _(b"%d problems detected, please check your install!\n"),
1801 problems,
1801 problems,
1802 )
1802 )
1803 fm.end()
1803 fm.end()
1804
1804
1805 return problems
1805 return problems
1806
1806
1807
1807
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    results = peer.known([bin(nodeid) for nodeid in ids])
    # Emit one digit per queried ID: 1 when the node is known, 0 otherwise.
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in results))
1821
1821
1822
1822
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # Kept only as an alias; all the work happens in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1827
1827
1828
1828
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced removal of the lock files themselves (DANGEROUS, hence the
    # explicit flags); nothing else is done in that mode.
    forced = False
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
        forced = True
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
        forced = True
    if forced:
        return 0

    acquired = []
    try:
        if opts.get('set_wlock'):
            try:
                acquired.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                acquired.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(acquired):
            # Hold the acquired lock(s) until the user acknowledges.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*acquired)

    now = time.time()

    def _report(vfs, name, trylock):
        """Print the state of one lock file; return 1 if held, else 0."""
        # Attempting a non-blocking acquisition reaps stale locks, which
        # makes the report below more accurate.
        try:
            heldlock = trylock(False)
        except error.LockHeld:
            heldlock = None

        if heldlock:
            heldlock.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # A missing lock file simply means the lock is free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held = _report(repo.svfs, b"lock", repo.lock)
    held += _report(repo.vfs, b"wlock", repo.wlock)

    return held
1940
1940
1941
1941
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def fulltextcache():
        # Not every revlog implementation carries a fulltext cache; abort
        # with an explanation when this one does not.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            fulltextcache().clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            mlog = repo.manifestlog
            store = mlog.getstorage(b'')
            for nodestr in add:
                try:
                    manifest = mlog[store.lookup(nodestr)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                # Reading the manifest also stores the revision in the cache.
                manifest.read()
            return

    # Neither --clear nor --add: display the cache contents.
    cache = fulltextcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # peek() does not update the LRU order, unlike a normal lookup.
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2013
2013
2014
2014
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable rendering when no --template is given.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged, with optional conflict-marker labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for side, node, labelidx in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=side)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > labelidx:
                fm_commits.data(label=ms._labels[labelidx])
    fm_commits.end()

    # Per-file merge records, including any per-file extras.
    fm_files = fm.nested(b'files')
    if ms.active():
        for path in ms:
            fm_files.startitem()
            fm_files.data(path=path)
            record = ms._state[path]
            fm_files.data(state=record[0])
            if record[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=record[1])
                fm_files.data(local_path=record[2])
                fm_files.data(ancestor_path=record[3])
                fm_files.data(ancestor_node=record[4])
                fm_files.data(other_path=record[5])
                fm_files.data(other_node=record[6])
                fm_files.data(local_flags=record[7])
            elif record[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=record[1])
                fm_files.data(rename_side=record[2])
            fm_extras = fm_files.nested(b'extras')
            for key, value in sorted(ms.extras(path).items()):
                fm_extras.startitem()
                fm_extras.data(key=key)
                fm_extras.data(value=value)
            fm_extras.end()

    fm_files.end()

    # Extras for files that are not themselves in the merge state.
    fm_extras = fm.nested(b'extras')
    for path, extradict in sorted(pycompat.iteritems(ms.allextras())):
        if path in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for key, value in pycompat.iteritems(extradict):
            fm_extras.startitem()
            fm_extras.data(file=path)
            fm_extras.data(key=key)
            fm_extras.data(value=value)
    fm_extras.end()

    fm.end()
2122
2122
2123
2123
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    allnames = set()
    # The 'branches' namespace historically listed only open branches, so
    # it is handled specially below instead of via the generic walk.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname != b'branches':
            allnames.update(ns.listnames(repo))
    allnames.update(
        branch
        for (branch, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    # No arguments means "complete the empty prefix", i.e. list everything.
    prefixes = args or [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in allnames if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2146
2146
2147
2147
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            # BUG FIX: the help text said "on stdin", but the command writes
            # the nodemap via ui.write(), i.e. to stdout.
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            # Grammar fix: "the data on disk data are" -> "the data on disk are".
            _(b'check that the data on disk are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap
    """
    if opts['dump_new']:
        # Build fresh persistent nodemap data and write it to stdout.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Rust/C index objects can serialize themselves directly.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Dump the raw on-disk nodemap data, if any, to stdout.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the on-disk data against the changelog index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Print the docket (metadata header) describing the on-disk nodemap.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2210
2210
2211
2211
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    # Three modes, checked in order below:
    #   --delete INDEX...           -> remove markers by index
    #   OBSOLETED [REPLACEMENT...]  -> create a new marker
    #   (no positional arguments)   -> list markers
    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Decode a full-length hex node id; abort on anything shorter,
        # longer, or non-hex.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # Deletion mode: validate all indices up front, then delete under
        # the repo lock.
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            # Deleting obsmarkers rewrites the obsstore file; that cannot
            # be done safely while a transaction may still append to it.
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record precursor -> successors in the obsstore.
        if opts[b'rev']:
            raise error.Abort(b'cannot select revision when creating marker')
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent info can only be recorded for changesets we
                    # actually know about (unfiltered view).
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: optionally restrict to markers relevant to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the full marker list, so iterate
            # over everything and filter display down to `markers`.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2359
2359
2360
2360
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the target revision (working directory when --rev is absent)
    # and print one "source -> destination" line per recorded copy.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for destination, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2373
2373
2374
2374
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # BUGFIX: this function was previously (mis)named `debugp1copies`,
    # which silently shadowed the real debugp1copies defined above at
    # module import time.  The registered command name (b'debugp2copies')
    # is unchanged; only the Python identifier is corrected.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2387
2387
2388
2388
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for `path`, keeping
        # only dirstate entries whose status character is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # The spec resolves outside this repository: nothing to offer.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Rebase the spec to be repo-root-relative with '/' separators,
        # matching the dirstate's internal path representation.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        # Note: opts keys are native strings here -- this command does not
        # run its kwargs through pycompat.byteskwargs().
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the bound methods once; this loop runs per dirstate entry.
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    # Without --full, stop at the next path separator and
                    # offer the directory prefix instead.
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Translate -n/-a/-r into dirstate status characters; an empty
    # selection falls back to all of them ('nmar') below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2457
2457
2458
2458
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, build the matcher against the first one,
    # and print each recorded copy as "source -> destination".
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2472
2472
2473
2473
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    islocal = peer.local() is not None
    pushable = peer.canpush()

    def yesno(flag):
        # Localized yes/no, matching the original output exactly.
        return _(b'yes') if flag else _(b'no')

    ui.write(_(b'url: %s\n') % peer.url())
    ui.write(_(b'local: %s\n') % yesno(islocal))
    ui.write(_(b'pushable: %s\n') % yesno(pushable))
2492
2492
2493
2493
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool takes effect through the ui.forcemerge override, the same
        # mechanism real merges use.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report (with -v) the inputs that can short-circuit tool selection.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    # Swallow _picktool()'s warning chatter unless --debug
                    # was given; popbuffer in `finally` keeps the ui state
                    # balanced even if _picktool raises.
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2581
2581
2582
2582
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        listing = target.listkeys(namespace)
        for key, value in sorted(pycompat.iteritems(listing)):
            ui.write(
                b"%s\t%s\n"
                % (stringutil.escapestr(key), stringutil.escapestr(value))
            )
        return

    # Update mode: conditionally set KEY from OLD to NEW over the wire.
    key, old, new = keyinfo
    with target.commandexecutor() as executor:
        outcome = executor.callcommand(
            b'pushkey',
            {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            },
        ).result()

    ui.status(pycompat.bytestr(outcome) + b'\n')
    return not outcome
2614
2614
2615
2615
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs (parent vectors) of two revisions: print each
    # vector, their depths, the hamming distance/delta, and the relation
    # character (=, >, <, or |).
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the comparisons above holds, `rel` stays
    # unbound and the final ui.write would raise NameError -- presumably
    # the four relations are exhaustive for pvecs; verify against the
    # pvec module's comparison operators.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2642
2642
2643
2643
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            # note: opts keys are native strings here -- this command does
            # not run its kwargs through pycompat.byteskwargs().
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but untracked by the dirstate...
            manifestonly = manifestfiles - dirstatefiles
            # ...plus dirstate-only files that are not pending adds.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2691
2691
2692
2692
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin CLI wrapper; all of the work happens in repair.rebuildfncache().
    repair.rebuildfncache(ui, repo)
2697
2697
2698
2698
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # Walk every matched file in the requested revision and report its
    # filelog rename (copy) metadata, if any.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            srcpath, srcnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, srcpath, hex(srcnode))
            )
2718
2718
2719
2719
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # One requirement per line, in sorted order for stable output.
    requirements = sorted(repo.requirements)
    for requirement in requirements:
        ui.write(b"%s\n" % requirement)
2725
2725
2726
2726
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With -d/--dump, print one line of raw per-revision index data and
    return early.  Otherwise, walk every revision once, accumulating
    counts and sizes (merges, empty revisions, snapshots per depth,
    delta-base classification, chunk compression types), then print the
    aggregated statistics.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # Raw dump mode: one row per revision, then return 0 without
        # computing any of the aggregate statistics below.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0  # running total of raw (uncompressed) sizes
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # nullrev delta parent means this revision is stored as a
                # full text; treat the revision as its own delta base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # A revision stops being a head once it appears as a parent.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Ratio of raw data seen so far vs. on-disk bytes consumed.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Decode the revlog version/flag word for the statistics header.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision: [min, max, total] triples; min starts as
    # None so the first observed size always replaces it.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one observation into a [min, max, total] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            # Revlog v0 has no reliable rawsize; only collect for v1+.
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored as a full text: starts a new delta chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta revision: extend the base revision's chain.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta: classify by which revision it is based on.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            # e.g. a filelog wrapper; reach through to the inner revlog.
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the chunk identifies the compression engine.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Convert the "total" slot of each accumulator into an average,
    # remembering the totals first for the percentage lines below.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Templates for width-aligned numeric output; the %%%dd placeholder is
    # filled with the digit count of the largest value to print.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Plain decimal format sized to fit `max`.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # Decimal-plus-percentage format sized to fit `max`.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair; 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Label for a chunk-type row: printable ASCII compression markers
        # are shown both as hex and as the character itself.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        # Size triples are only meaningful for revlog v1+ (see the
        # datasize collection guard above).
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                # Depth 0 snapshots are full revisions, reported above.
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            # Parent-based delta stats only make sense with generaldelta.
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3080 )
3080 )
3081
3081
3082
3082
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Prints one row per revision.  --format selects the v0 (0) or v1 (1)
    column layout; --verbose adds offset/length/size columns and
    --debug switches node ids from short to full hex.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full hex node ids with --debug, abbreviated ones otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # All ids have the same printed width; measure the first one.
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            # v0 layout identifies parents by nodeid.
            try:
                pp = r.parents(node)
            except Exception:
                # Keep dumping even if the index entry is corrupt.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # v1 layout identifies parents by revision number and adds
            # the per-revision flags word.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3196
3196
3197
3197
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered pipeline of (stage name, transform) applied to the parsed
    # tree; each stage's output feeds the next.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages whose tree is always printed, and stages printed only when
    # their tree differs from the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree for --verify-optimized.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision lists; exit 1 if they disagree.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the mismatch as a unified-diff-style listing.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the (final) tree and print the revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3329
3329
3330
3330
3331 @command(
3331 @command(
3332 b'debugserve',
3332 b'debugserve',
3333 [
3333 [
3334 (
3334 (
3335 b'',
3335 b'',
3336 b'sshstdio',
3336 b'sshstdio',
3337 False,
3337 False,
3338 _(b'run an SSH server bound to process handles'),
3338 _(b'run an SSH server bound to process handles'),
3339 ),
3339 ),
3340 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3340 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3341 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3341 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3342 ],
3342 ],
3343 b'',
3343 b'',
3344 )
3344 )
3345 def debugserve(ui, repo, **opts):
3345 def debugserve(ui, repo, **opts):
3346 """run a server with advanced settings
3346 """run a server with advanced settings
3347
3347
3348 This command is similar to :hg:`serve`. It exists partially as a
3348 This command is similar to :hg:`serve`. It exists partially as a
3349 workaround to the fact that ``hg serve --stdio`` must have specific
3349 workaround to the fact that ``hg serve --stdio`` must have specific
3350 arguments for security reasons.
3350 arguments for security reasons.
3351 """
3351 """
3352 opts = pycompat.byteskwargs(opts)
3352 opts = pycompat.byteskwargs(opts)
3353
3353
3354 if not opts[b'sshstdio']:
3354 if not opts[b'sshstdio']:
3355 raise error.Abort(_(b'only --sshstdio is currently supported'))
3355 raise error.Abort(_(b'only --sshstdio is currently supported'))
3356
3356
3357 logfh = None
3357 logfh = None
3358
3358
3359 if opts[b'logiofd'] and opts[b'logiofile']:
3359 if opts[b'logiofd'] and opts[b'logiofile']:
3360 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3360 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3361
3361
3362 if opts[b'logiofd']:
3362 if opts[b'logiofd']:
3363 # Ideally we would be line buffered. But line buffering in binary
3363 # Ideally we would be line buffered. But line buffering in binary
3364 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3364 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3365 # buffering could have performance impacts. But since this isn't
3365 # buffering could have performance impacts. But since this isn't
3366 # performance critical code, it should be fine.
3366 # performance critical code, it should be fine.
3367 try:
3367 try:
3368 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3368 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3369 except OSError as e:
3369 except OSError as e:
3370 if e.errno != errno.ESPIPE:
3370 if e.errno != errno.ESPIPE:
3371 raise
3371 raise
3372 # can't seek a pipe, so `ab` mode fails on py3
3372 # can't seek a pipe, so `ab` mode fails on py3
3373 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3373 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3374 elif opts[b'logiofile']:
3374 elif opts[b'logiofile']:
3375 logfh = open(opts[b'logiofile'], b'ab', 0)
3375 logfh = open(opts[b'logiofile'], b'ab', 0)
3376
3376
3377 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3377 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3378 s.serve_forever()
3378 s.serve_forever()
3379
3379
3380
3380
3381 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3381 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3382 def debugsetparents(ui, repo, rev1, rev2=None):
3382 def debugsetparents(ui, repo, rev1, rev2=None):
3383 """manually set the parents of the current working directory
3383 """manually set the parents of the current working directory
3384
3384
3385 This is useful for writing repository conversion tools, but should
3385 This is useful for writing repository conversion tools, but should
3386 be used with care. For example, neither the working directory nor the
3386 be used with care. For example, neither the working directory nor the
3387 dirstate is updated, so file status may be incorrect after running this
3387 dirstate is updated, so file status may be incorrect after running this
3388 command.
3388 command.
3389
3389
3390 Returns 0 on success.
3390 Returns 0 on success.
3391 """
3391 """
3392
3392
3393 node1 = scmutil.revsingle(repo, rev1).node()
3393 node1 = scmutil.revsingle(repo, rev1).node()
3394 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3394 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3395
3395
3396 with repo.wlock():
3396 with repo.wlock():
3397 repo.setparents(node1, node2)
3397 repo.setparents(node1, node2)
3398
3398
3399
3399
3400 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3400 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3401 def debugsidedata(ui, repo, file_, rev=None, **opts):
3401 def debugsidedata(ui, repo, file_, rev=None, **opts):
3402 """dump the side data for a cl/manifest/file revision
3402 """dump the side data for a cl/manifest/file revision
3403
3403
3404 Use --verbose to dump the sidedata content."""
3404 Use --verbose to dump the sidedata content."""
3405 opts = pycompat.byteskwargs(opts)
3405 opts = pycompat.byteskwargs(opts)
3406 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3406 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3407 if rev is not None:
3407 if rev is not None:
3408 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3408 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3409 file_, rev = None, file_
3409 file_, rev = None, file_
3410 elif rev is None:
3410 elif rev is None:
3411 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3411 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3412 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3412 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3413 r = getattr(r, '_revlog', r)
3413 r = getattr(r, '_revlog', r)
3414 try:
3414 try:
3415 sidedata = r.sidedata(r.lookup(rev))
3415 sidedata = r.sidedata(r.lookup(rev))
3416 except KeyError:
3416 except KeyError:
3417 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3417 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3418 if sidedata:
3418 if sidedata:
3419 sidedata = list(sidedata.items())
3419 sidedata = list(sidedata.items())
3420 sidedata.sort()
3420 sidedata.sort()
3421 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3421 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3422 for key, value in sidedata:
3422 for key, value in sidedata:
3423 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3423 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3424 if ui.verbose:
3424 if ui.verbose:
3425 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3425 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3426
3426
3427
3427
3428 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3428 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3429 def debugssl(ui, repo, source=None, **opts):
3429 def debugssl(ui, repo, source=None, **opts):
3430 '''test a secure connection to a server
3430 '''test a secure connection to a server
3431
3431
3432 This builds the certificate chain for the server on Windows, installing the
3432 This builds the certificate chain for the server on Windows, installing the
3433 missing intermediates and trusted root via Windows Update if necessary. It
3433 missing intermediates and trusted root via Windows Update if necessary. It
3434 does nothing on other platforms.
3434 does nothing on other platforms.
3435
3435
3436 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3436 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3437 that server is used. See :hg:`help urls` for more information.
3437 that server is used. See :hg:`help urls` for more information.
3438
3438
3439 If the update succeeds, retry the original operation. Otherwise, the cause
3439 If the update succeeds, retry the original operation. Otherwise, the cause
3440 of the SSL error is likely another issue.
3440 of the SSL error is likely another issue.
3441 '''
3441 '''
3442 if not pycompat.iswindows:
3442 if not pycompat.iswindows:
3443 raise error.Abort(
3443 raise error.Abort(
3444 _(b'certificate chain building is only possible on Windows')
3444 _(b'certificate chain building is only possible on Windows')
3445 )
3445 )
3446
3446
3447 if not source:
3447 if not source:
3448 if not repo:
3448 if not repo:
3449 raise error.Abort(
3449 raise error.Abort(
3450 _(
3450 _(
3451 b"there is no Mercurial repository here, and no "
3451 b"there is no Mercurial repository here, and no "
3452 b"server specified"
3452 b"server specified"
3453 )
3453 )
3454 )
3454 )
3455 source = b"default"
3455 source = b"default"
3456
3456
3457 source, branches = hg.parseurl(ui.expandpath(source))
3457 source, branches = hg.parseurl(ui.expandpath(source))
3458 url = util.url(source)
3458 url = util.url(source)
3459
3459
3460 defaultport = {b'https': 443, b'ssh': 22}
3460 defaultport = {b'https': 443, b'ssh': 22}
3461 if url.scheme in defaultport:
3461 if url.scheme in defaultport:
3462 try:
3462 try:
3463 addr = (url.host, int(url.port or defaultport[url.scheme]))
3463 addr = (url.host, int(url.port or defaultport[url.scheme]))
3464 except ValueError:
3464 except ValueError:
3465 raise error.Abort(_(b"malformed port number in URL"))
3465 raise error.Abort(_(b"malformed port number in URL"))
3466 else:
3466 else:
3467 raise error.Abort(_(b"only https and ssh connections are supported"))
3467 raise error.Abort(_(b"only https and ssh connections are supported"))
3468
3468
3469 from . import win32
3469 from . import win32
3470
3470
3471 s = ssl.wrap_socket(
3471 s = ssl.wrap_socket(
3472 socket.socket(),
3472 socket.socket(),
3473 ssl_version=ssl.PROTOCOL_TLS,
3473 ssl_version=ssl.PROTOCOL_TLS,
3474 cert_reqs=ssl.CERT_NONE,
3474 cert_reqs=ssl.CERT_NONE,
3475 ca_certs=None,
3475 ca_certs=None,
3476 )
3476 )
3477
3477
3478 try:
3478 try:
3479 s.connect(addr)
3479 s.connect(addr)
3480 cert = s.getpeercert(True)
3480 cert = s.getpeercert(True)
3481
3481
3482 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3482 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3483
3483
3484 complete = win32.checkcertificatechain(cert, build=False)
3484 complete = win32.checkcertificatechain(cert, build=False)
3485
3485
3486 if not complete:
3486 if not complete:
3487 ui.status(_(b'certificate chain is incomplete, updating... '))
3487 ui.status(_(b'certificate chain is incomplete, updating... '))
3488
3488
3489 if not win32.checkcertificatechain(cert):
3489 if not win32.checkcertificatechain(cert):
3490 ui.status(_(b'failed.\n'))
3490 ui.status(_(b'failed.\n'))
3491 else:
3491 else:
3492 ui.status(_(b'done.\n'))
3492 ui.status(_(b'done.\n'))
3493 else:
3493 else:
3494 ui.status(_(b'full certificate chain is available\n'))
3494 ui.status(_(b'full certificate chain is available\n'))
3495 finally:
3495 finally:
3496 s.close()
3496 s.close()
3497
3497
3498
3498
3499 @command(
3499 @command(
3500 b"debugbackupbundle",
3500 b"debugbackupbundle",
3501 [
3501 [
3502 (
3502 (
3503 b"",
3503 b"",
3504 b"recover",
3504 b"recover",
3505 b"",
3505 b"",
3506 b"brings the specified changeset back into the repository",
3506 b"brings the specified changeset back into the repository",
3507 )
3507 )
3508 ]
3508 ]
3509 + cmdutil.logopts,
3509 + cmdutil.logopts,
3510 _(b"hg debugbackupbundle [--recover HASH]"),
3510 _(b"hg debugbackupbundle [--recover HASH]"),
3511 )
3511 )
3512 def debugbackupbundle(ui, repo, *pats, **opts):
3512 def debugbackupbundle(ui, repo, *pats, **opts):
3513 """lists the changesets available in backup bundles
3513 """lists the changesets available in backup bundles
3514
3514
3515 Without any arguments, this command prints a list of the changesets in each
3515 Without any arguments, this command prints a list of the changesets in each
3516 backup bundle.
3516 backup bundle.
3517
3517
3518 --recover takes a changeset hash and unbundles the first bundle that
3518 --recover takes a changeset hash and unbundles the first bundle that
3519 contains that hash, which puts that changeset back in your repository.
3519 contains that hash, which puts that changeset back in your repository.
3520
3520
3521 --verbose will print the entire commit message and the bundle path for that
3521 --verbose will print the entire commit message and the bundle path for that
3522 backup.
3522 backup.
3523 """
3523 """
3524 backups = list(
3524 backups = list(
3525 filter(
3525 filter(
3526 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3526 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3527 )
3527 )
3528 )
3528 )
3529 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3529 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3530
3530
3531 opts = pycompat.byteskwargs(opts)
3531 opts = pycompat.byteskwargs(opts)
3532 opts[b"bundle"] = b""
3532 opts[b"bundle"] = b""
3533 opts[b"force"] = None
3533 opts[b"force"] = None
3534 limit = logcmdutil.getlimit(opts)
3534 limit = logcmdutil.getlimit(opts)
3535
3535
3536 def display(other, chlist, displayer):
3536 def display(other, chlist, displayer):
3537 if opts.get(b"newest_first"):
3537 if opts.get(b"newest_first"):
3538 chlist.reverse()
3538 chlist.reverse()
3539 count = 0
3539 count = 0
3540 for n in chlist:
3540 for n in chlist:
3541 if limit is not None and count >= limit:
3541 if limit is not None and count >= limit:
3542 break
3542 break
3543 parents = [True for p in other.changelog.parents(n) if p != nullid]
3543 parents = [True for p in other.changelog.parents(n) if p != nullid]
3544 if opts.get(b"no_merges") and len(parents) == 2:
3544 if opts.get(b"no_merges") and len(parents) == 2:
3545 continue
3545 continue
3546 count += 1
3546 count += 1
3547 displayer.show(other[n])
3547 displayer.show(other[n])
3548
3548
3549 recovernode = opts.get(b"recover")
3549 recovernode = opts.get(b"recover")
3550 if recovernode:
3550 if recovernode:
3551 if scmutil.isrevsymbol(repo, recovernode):
3551 if scmutil.isrevsymbol(repo, recovernode):
3552 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3552 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3553 return
3553 return
3554 elif backups:
3554 elif backups:
3555 msg = _(
3555 msg = _(
3556 b"Recover changesets using: hg debugbackupbundle --recover "
3556 b"Recover changesets using: hg debugbackupbundle --recover "
3557 b"<changeset hash>\n\nAvailable backup changesets:"
3557 b"<changeset hash>\n\nAvailable backup changesets:"
3558 )
3558 )
3559 ui.status(msg, label=b"status.removed")
3559 ui.status(msg, label=b"status.removed")
3560 else:
3560 else:
3561 ui.status(_(b"no backup changesets found\n"))
3561 ui.status(_(b"no backup changesets found\n"))
3562 return
3562 return
3563
3563
3564 for backup in backups:
3564 for backup in backups:
3565 # Much of this is copied from the hg incoming logic
3565 # Much of this is copied from the hg incoming logic
3566 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3566 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3567 source, branches = hg.parseurl(source, opts.get(b"branch"))
3567 source, branches = hg.parseurl(source, opts.get(b"branch"))
3568 try:
3568 try:
3569 other = hg.peer(repo, opts, source)
3569 other = hg.peer(repo, opts, source)
3570 except error.LookupError as ex:
3570 except error.LookupError as ex:
3571 msg = _(b"\nwarning: unable to open bundle %s") % source
3571 msg = _(b"\nwarning: unable to open bundle %s") % source
3572 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3572 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3573 ui.warn(msg, hint=hint)
3573 ui.warn(msg, hint=hint)
3574 continue
3574 continue
3575 revs, checkout = hg.addbranchrevs(
3575 revs, checkout = hg.addbranchrevs(
3576 repo, other, branches, opts.get(b"rev")
3576 repo, other, branches, opts.get(b"rev")
3577 )
3577 )
3578
3578
3579 if revs:
3579 if revs:
3580 revs = [other.lookup(rev) for rev in revs]
3580 revs = [other.lookup(rev) for rev in revs]
3581
3581
3582 quiet = ui.quiet
3582 quiet = ui.quiet
3583 try:
3583 try:
3584 ui.quiet = True
3584 ui.quiet = True
3585 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3585 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3586 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3586 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3587 )
3587 )
3588 except error.LookupError:
3588 except error.LookupError:
3589 continue
3589 continue
3590 finally:
3590 finally:
3591 ui.quiet = quiet
3591 ui.quiet = quiet
3592
3592
3593 try:
3593 try:
3594 if not chlist:
3594 if not chlist:
3595 continue
3595 continue
3596 if recovernode:
3596 if recovernode:
3597 with repo.lock(), repo.transaction(b"unbundle") as tr:
3597 with repo.lock(), repo.transaction(b"unbundle") as tr:
3598 if scmutil.isrevsymbol(other, recovernode):
3598 if scmutil.isrevsymbol(other, recovernode):
3599 ui.status(_(b"Unbundling %s\n") % (recovernode))
3599 ui.status(_(b"Unbundling %s\n") % (recovernode))
3600 f = hg.openpath(ui, source)
3600 f = hg.openpath(ui, source)
3601 gen = exchange.readbundle(ui, f, source)
3601 gen = exchange.readbundle(ui, f, source)
3602 if isinstance(gen, bundle2.unbundle20):
3602 if isinstance(gen, bundle2.unbundle20):
3603 bundle2.applybundle(
3603 bundle2.applybundle(
3604 repo,
3604 repo,
3605 gen,
3605 gen,
3606 tr,
3606 tr,
3607 source=b"unbundle",
3607 source=b"unbundle",
3608 url=b"bundle:" + source,
3608 url=b"bundle:" + source,
3609 )
3609 )
3610 else:
3610 else:
3611 gen.apply(repo, b"unbundle", b"bundle:" + source)
3611 gen.apply(repo, b"unbundle", b"bundle:" + source)
3612 break
3612 break
3613 else:
3613 else:
3614 backupdate = encoding.strtolocal(
3614 backupdate = encoding.strtolocal(
3615 time.strftime(
3615 time.strftime(
3616 "%a %H:%M, %Y-%m-%d",
3616 "%a %H:%M, %Y-%m-%d",
3617 time.localtime(os.path.getmtime(source)),
3617 time.localtime(os.path.getmtime(source)),
3618 )
3618 )
3619 )
3619 )
3620 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3620 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3621 if ui.verbose:
3621 if ui.verbose:
3622 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3622 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3623 else:
3623 else:
3624 opts[
3624 opts[
3625 b"template"
3625 b"template"
3626 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3626 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3627 displayer = logcmdutil.changesetdisplayer(
3627 displayer = logcmdutil.changesetdisplayer(
3628 ui, other, opts, False
3628 ui, other, opts, False
3629 )
3629 )
3630 display(other, chlist, displayer)
3630 display(other, chlist, displayer)
3631 displayer.close()
3631 displayer.close()
3632 finally:
3632 finally:
3633 cleanupfn()
3633 cleanupfn()
3634
3634
3635
3635
3636 @command(
3636 @command(
3637 b'debugsub',
3637 b'debugsub',
3638 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3638 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3639 _(b'[-r REV] [REV]'),
3639 _(b'[-r REV] [REV]'),
3640 )
3640 )
3641 def debugsub(ui, repo, rev=None):
3641 def debugsub(ui, repo, rev=None):
3642 ctx = scmutil.revsingle(repo, rev, None)
3642 ctx = scmutil.revsingle(repo, rev, None)
3643 for k, v in sorted(ctx.substate.items()):
3643 for k, v in sorted(ctx.substate.items()):
3644 ui.writenoi18n(b'path %s\n' % k)
3644 ui.writenoi18n(b'path %s\n' % k)
3645 ui.writenoi18n(b' source %s\n' % v[0])
3645 ui.writenoi18n(b' source %s\n' % v[0])
3646 ui.writenoi18n(b' revision %s\n' % v[1])
3646 ui.writenoi18n(b' revision %s\n' % v[1])
3647
3647
3648
3648
3649 @command(
3649 @command(
3650 b'debugsuccessorssets',
3650 b'debugsuccessorssets',
3651 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3651 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3652 _(b'[REV]'),
3652 _(b'[REV]'),
3653 )
3653 )
3654 def debugsuccessorssets(ui, repo, *revs, **opts):
3654 def debugsuccessorssets(ui, repo, *revs, **opts):
3655 """show set of successors for revision
3655 """show set of successors for revision
3656
3656
3657 A successors set of changeset A is a consistent group of revisions that
3657 A successors set of changeset A is a consistent group of revisions that
3658 succeed A. It contains non-obsolete changesets only unless closests
3658 succeed A. It contains non-obsolete changesets only unless closests
3659 successors set is set.
3659 successors set is set.
3660
3660
3661 In most cases a changeset A has a single successors set containing a single
3661 In most cases a changeset A has a single successors set containing a single
3662 successor (changeset A replaced by A').
3662 successor (changeset A replaced by A').
3663
3663
3664 A changeset that is made obsolete with no successors are called "pruned".
3664 A changeset that is made obsolete with no successors are called "pruned".
3665 Such changesets have no successors sets at all.
3665 Such changesets have no successors sets at all.
3666
3666
3667 A changeset that has been "split" will have a successors set containing
3667 A changeset that has been "split" will have a successors set containing
3668 more than one successor.
3668 more than one successor.
3669
3669
3670 A changeset that has been rewritten in multiple different ways is called
3670 A changeset that has been rewritten in multiple different ways is called
3671 "divergent". Such changesets have multiple successor sets (each of which
3671 "divergent". Such changesets have multiple successor sets (each of which
3672 may also be split, i.e. have multiple successors).
3672 may also be split, i.e. have multiple successors).
3673
3673
3674 Results are displayed as follows::
3674 Results are displayed as follows::
3675
3675
3676 <rev1>
3676 <rev1>
3677 <successors-1A>
3677 <successors-1A>
3678 <rev2>
3678 <rev2>
3679 <successors-2A>
3679 <successors-2A>
3680 <successors-2B1> <successors-2B2> <successors-2B3>
3680 <successors-2B1> <successors-2B2> <successors-2B3>
3681
3681
3682 Here rev2 has two possible (i.e. divergent) successors sets. The first
3682 Here rev2 has two possible (i.e. divergent) successors sets. The first
3683 holds one element, whereas the second holds three (i.e. the changeset has
3683 holds one element, whereas the second holds three (i.e. the changeset has
3684 been split).
3684 been split).
3685 """
3685 """
3686 # passed to successorssets caching computation from one call to another
3686 # passed to successorssets caching computation from one call to another
3687 cache = {}
3687 cache = {}
3688 ctx2str = bytes
3688 ctx2str = bytes
3689 node2str = short
3689 node2str = short
3690 for rev in scmutil.revrange(repo, revs):
3690 for rev in scmutil.revrange(repo, revs):
3691 ctx = repo[rev]
3691 ctx = repo[rev]
3692 ui.write(b'%s\n' % ctx2str(ctx))
3692 ui.write(b'%s\n' % ctx2str(ctx))
3693 for succsset in obsutil.successorssets(
3693 for succsset in obsutil.successorssets(
3694 repo, ctx.node(), closest=opts['closest'], cache=cache
3694 repo, ctx.node(), closest=opts['closest'], cache=cache
3695 ):
3695 ):
3696 if succsset:
3696 if succsset:
3697 ui.write(b' ')
3697 ui.write(b' ')
3698 ui.write(node2str(succsset[0]))
3698 ui.write(node2str(succsset[0]))
3699 for node in succsset[1:]:
3699 for node in succsset[1:]:
3700 ui.write(b' ')
3700 ui.write(b' ')
3701 ui.write(node2str(node))
3701 ui.write(node2str(node))
3702 ui.write(b'\n')
3702 ui.write(b'\n')
3703
3703
3704
3704
3705 @command(b'debugtagscache', [])
3705 @command(b'debugtagscache', [])
3706 def debugtagscache(ui, repo):
3706 def debugtagscache(ui, repo):
3707 """display the contents of .hg/cache/hgtagsfnodes1"""
3707 """display the contents of .hg/cache/hgtagsfnodes1"""
3708 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3708 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3709 for r in repo:
3709 for r in repo:
3710 node = repo[r].node()
3710 node = repo[r].node()
3711 tagsnode = cache.getfnode(node, computemissing=False)
3711 tagsnode = cache.getfnode(node, computemissing=False)
3712 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3712 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3713 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3713 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3714
3714
3715
3715
3716 @command(
3716 @command(
3717 b'debugtemplate',
3717 b'debugtemplate',
3718 [
3718 [
3719 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3719 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3720 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3720 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3721 ],
3721 ],
3722 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3722 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3723 optionalrepo=True,
3723 optionalrepo=True,
3724 )
3724 )
3725 def debugtemplate(ui, repo, tmpl, **opts):
3725 def debugtemplate(ui, repo, tmpl, **opts):
3726 """parse and apply a template
3726 """parse and apply a template
3727
3727
3728 If -r/--rev is given, the template is processed as a log template and
3728 If -r/--rev is given, the template is processed as a log template and
3729 applied to the given changesets. Otherwise, it is processed as a generic
3729 applied to the given changesets. Otherwise, it is processed as a generic
3730 template.
3730 template.
3731
3731
3732 Use --verbose to print the parsed tree.
3732 Use --verbose to print the parsed tree.
3733 """
3733 """
3734 revs = None
3734 revs = None
3735 if opts['rev']:
3735 if opts['rev']:
3736 if repo is None:
3736 if repo is None:
3737 raise error.RepoError(
3737 raise error.RepoError(
3738 _(b'there is no Mercurial repository here (.hg not found)')
3738 _(b'there is no Mercurial repository here (.hg not found)')
3739 )
3739 )
3740 revs = scmutil.revrange(repo, opts['rev'])
3740 revs = scmutil.revrange(repo, opts['rev'])
3741
3741
3742 props = {}
3742 props = {}
3743 for d in opts['define']:
3743 for d in opts['define']:
3744 try:
3744 try:
3745 k, v = (e.strip() for e in d.split(b'=', 1))
3745 k, v = (e.strip() for e in d.split(b'=', 1))
3746 if not k or k == b'ui':
3746 if not k or k == b'ui':
3747 raise ValueError
3747 raise ValueError
3748 props[k] = v
3748 props[k] = v
3749 except ValueError:
3749 except ValueError:
3750 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3750 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3751
3751
3752 if ui.verbose:
3752 if ui.verbose:
3753 aliases = ui.configitems(b'templatealias')
3753 aliases = ui.configitems(b'templatealias')
3754 tree = templater.parse(tmpl)
3754 tree = templater.parse(tmpl)
3755 ui.note(templater.prettyformat(tree), b'\n')
3755 ui.note(templater.prettyformat(tree), b'\n')
3756 newtree = templater.expandaliases(tree, aliases)
3756 newtree = templater.expandaliases(tree, aliases)
3757 if newtree != tree:
3757 if newtree != tree:
3758 ui.notenoi18n(
3758 ui.notenoi18n(
3759 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3759 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3760 )
3760 )
3761
3761
3762 if revs is None:
3762 if revs is None:
3763 tres = formatter.templateresources(ui, repo)
3763 tres = formatter.templateresources(ui, repo)
3764 t = formatter.maketemplater(ui, tmpl, resources=tres)
3764 t = formatter.maketemplater(ui, tmpl, resources=tres)
3765 if ui.verbose:
3765 if ui.verbose:
3766 kwds, funcs = t.symbolsuseddefault()
3766 kwds, funcs = t.symbolsuseddefault()
3767 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3767 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3768 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3768 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3769 ui.write(t.renderdefault(props))
3769 ui.write(t.renderdefault(props))
3770 else:
3770 else:
3771 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3771 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3772 if ui.verbose:
3772 if ui.verbose:
3773 kwds, funcs = displayer.t.symbolsuseddefault()
3773 kwds, funcs = displayer.t.symbolsuseddefault()
3774 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3774 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3775 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3775 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3776 for r in revs:
3776 for r in revs:
3777 displayer.show(repo[r], **pycompat.strkwargs(props))
3777 displayer.show(repo[r], **pycompat.strkwargs(props))
3778 displayer.close()
3778 displayer.close()
3779
3779
3780
3780
3781 @command(
3781 @command(
3782 b'debuguigetpass',
3782 b'debuguigetpass',
3783 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3783 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3784 _(b'[-p TEXT]'),
3784 _(b'[-p TEXT]'),
3785 norepo=True,
3785 norepo=True,
3786 )
3786 )
3787 def debuguigetpass(ui, prompt=b''):
3787 def debuguigetpass(ui, prompt=b''):
3788 """show prompt to type password"""
3788 """show prompt to type password"""
3789 r = ui.getpass(prompt)
3789 r = ui.getpass(prompt)
3790 if r is not None:
3790 if r is None:
3791 r = encoding.strtolocal(r)
3792 else:
3793 r = b"<default response>"
3791 r = b"<default response>"
3794 ui.writenoi18n(b'response: %s\n' % r)
3792 ui.writenoi18n(b'response: %s\n' % r)
3795
3793
3796
3794
3797 @command(
3795 @command(
3798 b'debuguiprompt',
3796 b'debuguiprompt',
3799 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3797 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3800 _(b'[-p TEXT]'),
3798 _(b'[-p TEXT]'),
3801 norepo=True,
3799 norepo=True,
3802 )
3800 )
3803 def debuguiprompt(ui, prompt=b''):
3801 def debuguiprompt(ui, prompt=b''):
3804 """show plain prompt"""
3802 """show plain prompt"""
3805 r = ui.prompt(prompt)
3803 r = ui.prompt(prompt)
3806 ui.writenoi18n(b'response: %s\n' % r)
3804 ui.writenoi18n(b'response: %s\n' % r)
3807
3805
3808
3806
3809 @command(b'debugupdatecaches', [])
3807 @command(b'debugupdatecaches', [])
3810 def debugupdatecaches(ui, repo, *pats, **opts):
3808 def debugupdatecaches(ui, repo, *pats, **opts):
3811 """warm all known caches in the repository"""
3809 """warm all known caches in the repository"""
3812 with repo.wlock(), repo.lock():
3810 with repo.wlock(), repo.lock():
3813 repo.updatecaches(full=True)
3811 repo.updatecaches(full=True)
3814
3812
3815
3813
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    """
    # All of the actual upgrade logic lives in the upgrade module; this
    # command is a thin forwarding wrapper.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimize, backup=backup, **opts
    )
3862
3860
3863
3861
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    # Fixes vs. previous revision: the loop variable no longer shadows the
    # builtin ``abs``, ``max()`` consumes generators instead of throwaway
    # lists, and the display-normalization callable is chosen with a plain
    # conditional instead of rebinding an assigned lambda (PEP 8 E731).
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally normalize OS path separators to '/' for display only.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        normalize = util.normpath
    else:
        normalize = lambda fn: fn
    # Column widths are sized to the longest repo-relative and
    # cwd-relative paths so the output lines up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            normalize(repo.pathto(path)),
            m.exact(path) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
3890
3888
3891
3889
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)" and
            # leave a trailing space so it joins cleanly with the reason.
            descriptions = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(descriptions) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3909
3907
3910
3908
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # The remote options only configure the peer connection; they must not
    # be forwarded as wire-command arguments.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Forward only the options that were actually set.
    args = {k: v for k, v in pycompat.iteritems(opts) if v}
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)
3938
3936
3939
3937
3940 def _parsewirelangblocks(fh):
3938 def _parsewirelangblocks(fh):
3941 activeaction = None
3939 activeaction = None
3942 blocklines = []
3940 blocklines = []
3943 lastindent = 0
3941 lastindent = 0
3944
3942
3945 for line in fh:
3943 for line in fh:
3946 line = line.rstrip()
3944 line = line.rstrip()
3947 if not line:
3945 if not line:
3948 continue
3946 continue
3949
3947
3950 if line.startswith(b'#'):
3948 if line.startswith(b'#'):
3951 continue
3949 continue
3952
3950
3953 if not line.startswith(b' '):
3951 if not line.startswith(b' '):
3954 # New block. Flush previous one.
3952 # New block. Flush previous one.
3955 if activeaction:
3953 if activeaction:
3956 yield activeaction, blocklines
3954 yield activeaction, blocklines
3957
3955
3958 activeaction = line
3956 activeaction = line
3959 blocklines = []
3957 blocklines = []
3960 lastindent = 0
3958 lastindent = 0
3961 continue
3959 continue
3962
3960
3963 # Else we start with an indent.
3961 # Else we start with an indent.
3964
3962
3965 if not activeaction:
3963 if not activeaction:
3966 raise error.Abort(_(b'indented line outside of block'))
3964 raise error.Abort(_(b'indented line outside of block'))
3967
3965
3968 indent = len(line) - len(line.lstrip())
3966 indent = len(line) - len(line.lstrip())
3969
3967
3970 # If this line is indented more than the last line, concatenate it.
3968 # If this line is indented more than the last line, concatenate it.
3971 if indent > lastindent and blocklines:
3969 if indent > lastindent and blocklines:
3972 blocklines[-1] += line.lstrip()
3970 blocklines[-1] += line.lstrip()
3973 else:
3971 else:
3974 blocklines.append(line)
3972 blocklines.append(line)
3975 lastindent = indent
3973 lastindent = indent
3976
3974
3977 # Flush last block.
3975 # Flush last block.
3978 if activeaction:
3976 if activeaction:
3979 yield activeaction, blocklines
3977 yield activeaction, blocklines
3980
3978
3981
3979
3982 @command(
3980 @command(
3983 b'debugwireproto',
3981 b'debugwireproto',
3984 [
3982 [
3985 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3983 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3986 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3984 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3987 (
3985 (
3988 b'',
3986 b'',
3989 b'noreadstderr',
3987 b'noreadstderr',
3990 False,
3988 False,
3991 _(b'do not read from stderr of the remote'),
3989 _(b'do not read from stderr of the remote'),
3992 ),
3990 ),
3993 (
3991 (
3994 b'',
3992 b'',
3995 b'nologhandshake',
3993 b'nologhandshake',
3996 False,
3994 False,
3997 _(b'do not log I/O related to the peer handshake'),
3995 _(b'do not log I/O related to the peer handshake'),
3998 ),
3996 ),
3999 ]
3997 ]
4000 + cmdutil.remoteopts,
3998 + cmdutil.remoteopts,
4001 _(b'[PATH]'),
3999 _(b'[PATH]'),
4002 optionalrepo=True,
4000 optionalrepo=True,
4003 )
4001 )
4004 def debugwireproto(ui, repo, path=None, **opts):
4002 def debugwireproto(ui, repo, path=None, **opts):
4005 """send wire protocol commands to a server
4003 """send wire protocol commands to a server
4006
4004
4007 This command can be used to issue wire protocol commands to remote
4005 This command can be used to issue wire protocol commands to remote
4008 peers and to debug the raw data being exchanged.
4006 peers and to debug the raw data being exchanged.
4009
4007
4010 ``--localssh`` will start an SSH server against the current repository
4008 ``--localssh`` will start an SSH server against the current repository
4011 and connect to that. By default, the connection will perform a handshake
4009 and connect to that. By default, the connection will perform a handshake
4012 and establish an appropriate peer instance.
4010 and establish an appropriate peer instance.
4013
4011
4014 ``--peer`` can be used to bypass the handshake protocol and construct a
4012 ``--peer`` can be used to bypass the handshake protocol and construct a
4015 peer instance using the specified class type. Valid values are ``raw``,
4013 peer instance using the specified class type. Valid values are ``raw``,
4016 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4014 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4017 raw data payloads and don't support higher-level command actions.
4015 raw data payloads and don't support higher-level command actions.
4018
4016
4019 ``--noreadstderr`` can be used to disable automatic reading from stderr
4017 ``--noreadstderr`` can be used to disable automatic reading from stderr
4020 of the peer (for SSH connections only). Disabling automatic reading of
4018 of the peer (for SSH connections only). Disabling automatic reading of
4021 stderr is useful for making output more deterministic.
4019 stderr is useful for making output more deterministic.
4022
4020
4023 Commands are issued via a mini language which is specified via stdin.
4021 Commands are issued via a mini language which is specified via stdin.
4024 The language consists of individual actions to perform. An action is
4022 The language consists of individual actions to perform. An action is
4025 defined by a block. A block is defined as a line with no leading
4023 defined by a block. A block is defined as a line with no leading
4026 space followed by 0 or more lines with leading space. Blocks are
4024 space followed by 0 or more lines with leading space. Blocks are
4027 effectively a high-level command with additional metadata.
4025 effectively a high-level command with additional metadata.
4028
4026
4029 Lines beginning with ``#`` are ignored.
4027 Lines beginning with ``#`` are ignored.
4030
4028
4031 The following sections denote available actions.
4029 The following sections denote available actions.
4032
4030
4033 raw
4031 raw
4034 ---
4032 ---
4035
4033
4036 Send raw data to the server.
4034 Send raw data to the server.
4037
4035
4038 The block payload contains the raw data to send as one atomic send
4036 The block payload contains the raw data to send as one atomic send
4039 operation. The data may not actually be delivered in a single system
4037 operation. The data may not actually be delivered in a single system
4040 call: it depends on the abilities of the transport being used.
4038 call: it depends on the abilities of the transport being used.
4041
4039
4042 Each line in the block is de-indented and concatenated. Then, that
4040 Each line in the block is de-indented and concatenated. Then, that
4043 value is evaluated as a Python b'' literal. This allows the use of
4041 value is evaluated as a Python b'' literal. This allows the use of
4044 backslash escaping, etc.
4042 backslash escaping, etc.
4045
4043
4046 raw+
4044 raw+
4047 ----
4045 ----
4048
4046
4049 Behaves like ``raw`` except flushes output afterwards.
4047 Behaves like ``raw`` except flushes output afterwards.
4050
4048
4051 command <X>
4049 command <X>
4052 -----------
4050 -----------
4053
4051
4054 Send a request to run a named command, whose name follows the ``command``
4052 Send a request to run a named command, whose name follows the ``command``
4055 string.
4053 string.
4056
4054
4057 Arguments to the command are defined as lines in this block. The format of
4055 Arguments to the command are defined as lines in this block. The format of
4058 each line is ``<key> <value>``. e.g.::
4056 each line is ``<key> <value>``. e.g.::
4059
4057
4060 command listkeys
4058 command listkeys
4061 namespace bookmarks
4059 namespace bookmarks
4062
4060
4063 If the value begins with ``eval:``, it will be interpreted as a Python
4061 If the value begins with ``eval:``, it will be interpreted as a Python
4064 literal expression. Otherwise values are interpreted as Python b'' literals.
4062 literal expression. Otherwise values are interpreted as Python b'' literals.
4065 This allows sending complex types and encoding special byte sequences via
4063 This allows sending complex types and encoding special byte sequences via
4066 backslash escaping.
4064 backslash escaping.
4067
4065
4068 The following arguments have special meaning:
4066 The following arguments have special meaning:
4069
4067
4070 ``PUSHFILE``
4068 ``PUSHFILE``
4071 When defined, the *push* mechanism of the peer will be used instead
4069 When defined, the *push* mechanism of the peer will be used instead
4072 of the static request-response mechanism and the content of the
4070 of the static request-response mechanism and the content of the
4073 file specified in the value of this argument will be sent as the
4071 file specified in the value of this argument will be sent as the
4074 command payload.
4072 command payload.
4075
4073
4076 This can be used to submit a local bundle file to the remote.
4074 This can be used to submit a local bundle file to the remote.
4077
4075
4078 batchbegin
4076 batchbegin
4079 ----------
4077 ----------
4080
4078
4081 Instruct the peer to begin a batched send.
4079 Instruct the peer to begin a batched send.
4082
4080
4083 All ``command`` blocks are queued for execution until the next
4081 All ``command`` blocks are queued for execution until the next
4084 ``batchsubmit`` block.
4082 ``batchsubmit`` block.
4085
4083
4086 batchsubmit
4084 batchsubmit
4087 -----------
4085 -----------
4088
4086
4089 Submit previously queued ``command`` blocks as a batch request.
4087 Submit previously queued ``command`` blocks as a batch request.
4090
4088
4091 This action MUST be paired with a ``batchbegin`` action.
4089 This action MUST be paired with a ``batchbegin`` action.
4092
4090
4093 httprequest <method> <path>
4091 httprequest <method> <path>
4094 ---------------------------
4092 ---------------------------
4095
4093
4096 (HTTP peer only)
4094 (HTTP peer only)
4097
4095
4098 Send an HTTP request to the peer.
4096 Send an HTTP request to the peer.
4099
4097
4100 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4098 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4101
4099
4102 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4100 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4103 headers to add to the request. e.g. ``Accept: foo``.
4101 headers to add to the request. e.g. ``Accept: foo``.
4104
4102
4105 The following arguments are special:
4103 The following arguments are special:
4106
4104
4107 ``BODYFILE``
4105 ``BODYFILE``
4108 The content of the file defined as the value to this argument will be
4106 The content of the file defined as the value to this argument will be
4109 transferred verbatim as the HTTP request body.
4107 transferred verbatim as the HTTP request body.
4110
4108
4111 ``frame <type> <flags> <payload>``
4109 ``frame <type> <flags> <payload>``
4112 Send a unified protocol frame as part of the request body.
4110 Send a unified protocol frame as part of the request body.
4113
4111
4114 All frames will be collected and sent as the body to the HTTP
4112 All frames will be collected and sent as the body to the HTTP
4115 request.
4113 request.
4116
4114
4117 close
4115 close
4118 -----
4116 -----
4119
4117
4120 Close the connection to the server.
4118 Close the connection to the server.
4121
4119
4122 flush
4120 flush
4123 -----
4121 -----
4124
4122
4125 Flush data written to the server.
4123 Flush data written to the server.
4126
4124
4127 readavailable
4125 readavailable
4128 -------------
4126 -------------
4129
4127
4130 Close the write end of the connection and read all available data from
4128 Close the write end of the connection and read all available data from
4131 the server.
4129 the server.
4132
4130
4133 If the connection to the server encompasses multiple pipes, we poll both
4131 If the connection to the server encompasses multiple pipes, we poll both
4134 pipes and read available data.
4132 pipes and read available data.
4135
4133
4136 readline
4134 readline
4137 --------
4135 --------
4138
4136
4139 Read a line of output from the server. If there are multiple output
4137 Read a line of output from the server. If there are multiple output
4140 pipes, reads only the main pipe.
4138 pipes, reads only the main pipe.
4141
4139
4142 ereadline
4140 ereadline
4143 ---------
4141 ---------
4144
4142
4145 Like ``readline``, but read from the stderr pipe, if available.
4143 Like ``readline``, but read from the stderr pipe, if available.
4146
4144
4147 read <X>
4145 read <X>
4148 --------
4146 --------
4149
4147
4150 ``read()`` N bytes from the server's main output pipe.
4148 ``read()`` N bytes from the server's main output pipe.
4151
4149
4152 eread <X>
4150 eread <X>
4153 ---------
4151 ---------
4154
4152
4155 ``read()`` N bytes from the server's stderr pipe, if available.
4153 ``read()`` N bytes from the server's stderr pipe, if available.
4156
4154
4157 Specifying Unified Frame-Based Protocol Frames
4155 Specifying Unified Frame-Based Protocol Frames
4158 ----------------------------------------------
4156 ----------------------------------------------
4159
4157
4160 It is possible to emit a *Unified Frame-Based Protocol* by using special
4158 It is possible to emit a *Unified Frame-Based Protocol* by using special
4161 syntax.
4159 syntax.
4162
4160
4163 A frame is composed as a type, flags, and payload. These can be parsed
4161 A frame is composed as a type, flags, and payload. These can be parsed
4164 from a string of the form:
4162 from a string of the form:
4165
4163
4166 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4164 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4167
4165
4168 ``request-id`` and ``stream-id`` are integers defining the request and
4166 ``request-id`` and ``stream-id`` are integers defining the request and
4169 stream identifiers.
4167 stream identifiers.
4170
4168
4171 ``type`` can be an integer value for the frame type or the string name
4169 ``type`` can be an integer value for the frame type or the string name
4172 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4170 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4173 ``command-name``.
4171 ``command-name``.
4174
4172
4175 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4173 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4176 components. Each component (and there can be just one) can be an integer
4174 components. Each component (and there can be just one) can be an integer
4177 or a flag name for stream flags or frame flags, respectively. Values are
4175 or a flag name for stream flags or frame flags, respectively. Values are
4178 resolved to integers and then bitwise OR'd together.
4176 resolved to integers and then bitwise OR'd together.
4179
4177
4180 ``payload`` represents the raw frame payload. If it begins with
4178 ``payload`` represents the raw frame payload. If it begins with
4181 ``cbor:``, the following string is evaluated as Python code and the
4179 ``cbor:``, the following string is evaluated as Python code and the
4182 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4180 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4183 as a Python byte string literal.
4181 as a Python byte string literal.
4184 """
4182 """
4185 opts = pycompat.byteskwargs(opts)
4183 opts = pycompat.byteskwargs(opts)
4186
4184
4187 if opts[b'localssh'] and not repo:
4185 if opts[b'localssh'] and not repo:
4188 raise error.Abort(_(b'--localssh requires a repository'))
4186 raise error.Abort(_(b'--localssh requires a repository'))
4189
4187
4190 if opts[b'peer'] and opts[b'peer'] not in (
4188 if opts[b'peer'] and opts[b'peer'] not in (
4191 b'raw',
4189 b'raw',
4192 b'http2',
4190 b'http2',
4193 b'ssh1',
4191 b'ssh1',
4194 b'ssh2',
4192 b'ssh2',
4195 ):
4193 ):
4196 raise error.Abort(
4194 raise error.Abort(
4197 _(b'invalid value for --peer'),
4195 _(b'invalid value for --peer'),
4198 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4196 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4199 )
4197 )
4200
4198
4201 if path and opts[b'localssh']:
4199 if path and opts[b'localssh']:
4202 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4200 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4203
4201
4204 if ui.interactive():
4202 if ui.interactive():
4205 ui.write(_(b'(waiting for commands on stdin)\n'))
4203 ui.write(_(b'(waiting for commands on stdin)\n'))
4206
4204
4207 blocks = list(_parsewirelangblocks(ui.fin))
4205 blocks = list(_parsewirelangblocks(ui.fin))
4208
4206
4209 proc = None
4207 proc = None
4210 stdin = None
4208 stdin = None
4211 stdout = None
4209 stdout = None
4212 stderr = None
4210 stderr = None
4213 opener = None
4211 opener = None
4214
4212
4215 if opts[b'localssh']:
4213 if opts[b'localssh']:
4216 # We start the SSH server in its own process so there is process
4214 # We start the SSH server in its own process so there is process
4217 # separation. This prevents a whole class of potential bugs around
4215 # separation. This prevents a whole class of potential bugs around
4218 # shared state from interfering with server operation.
4216 # shared state from interfering with server operation.
4219 args = procutil.hgcmd() + [
4217 args = procutil.hgcmd() + [
4220 b'-R',
4218 b'-R',
4221 repo.root,
4219 repo.root,
4222 b'debugserve',
4220 b'debugserve',
4223 b'--sshstdio',
4221 b'--sshstdio',
4224 ]
4222 ]
4225 proc = subprocess.Popen(
4223 proc = subprocess.Popen(
4226 pycompat.rapply(procutil.tonativestr, args),
4224 pycompat.rapply(procutil.tonativestr, args),
4227 stdin=subprocess.PIPE,
4225 stdin=subprocess.PIPE,
4228 stdout=subprocess.PIPE,
4226 stdout=subprocess.PIPE,
4229 stderr=subprocess.PIPE,
4227 stderr=subprocess.PIPE,
4230 bufsize=0,
4228 bufsize=0,
4231 )
4229 )
4232
4230
4233 stdin = proc.stdin
4231 stdin = proc.stdin
4234 stdout = proc.stdout
4232 stdout = proc.stdout
4235 stderr = proc.stderr
4233 stderr = proc.stderr
4236
4234
4237 # We turn the pipes into observers so we can log I/O.
4235 # We turn the pipes into observers so we can log I/O.
4238 if ui.verbose or opts[b'peer'] == b'raw':
4236 if ui.verbose or opts[b'peer'] == b'raw':
4239 stdin = util.makeloggingfileobject(
4237 stdin = util.makeloggingfileobject(
4240 ui, proc.stdin, b'i', logdata=True
4238 ui, proc.stdin, b'i', logdata=True
4241 )
4239 )
4242 stdout = util.makeloggingfileobject(
4240 stdout = util.makeloggingfileobject(
4243 ui, proc.stdout, b'o', logdata=True
4241 ui, proc.stdout, b'o', logdata=True
4244 )
4242 )
4245 stderr = util.makeloggingfileobject(
4243 stderr = util.makeloggingfileobject(
4246 ui, proc.stderr, b'e', logdata=True
4244 ui, proc.stderr, b'e', logdata=True
4247 )
4245 )
4248
4246
4249 # --localssh also implies the peer connection settings.
4247 # --localssh also implies the peer connection settings.
4250
4248
4251 url = b'ssh://localserver'
4249 url = b'ssh://localserver'
4252 autoreadstderr = not opts[b'noreadstderr']
4250 autoreadstderr = not opts[b'noreadstderr']
4253
4251
4254 if opts[b'peer'] == b'ssh1':
4252 if opts[b'peer'] == b'ssh1':
4255 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4253 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4256 peer = sshpeer.sshv1peer(
4254 peer = sshpeer.sshv1peer(
4257 ui,
4255 ui,
4258 url,
4256 url,
4259 proc,
4257 proc,
4260 stdin,
4258 stdin,
4261 stdout,
4259 stdout,
4262 stderr,
4260 stderr,
4263 None,
4261 None,
4264 autoreadstderr=autoreadstderr,
4262 autoreadstderr=autoreadstderr,
4265 )
4263 )
4266 elif opts[b'peer'] == b'ssh2':
4264 elif opts[b'peer'] == b'ssh2':
4267 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4265 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4268 peer = sshpeer.sshv2peer(
4266 peer = sshpeer.sshv2peer(
4269 ui,
4267 ui,
4270 url,
4268 url,
4271 proc,
4269 proc,
4272 stdin,
4270 stdin,
4273 stdout,
4271 stdout,
4274 stderr,
4272 stderr,
4275 None,
4273 None,
4276 autoreadstderr=autoreadstderr,
4274 autoreadstderr=autoreadstderr,
4277 )
4275 )
4278 elif opts[b'peer'] == b'raw':
4276 elif opts[b'peer'] == b'raw':
4279 ui.write(_(b'using raw connection to peer\n'))
4277 ui.write(_(b'using raw connection to peer\n'))
4280 peer = None
4278 peer = None
4281 else:
4279 else:
4282 ui.write(_(b'creating ssh peer from handshake results\n'))
4280 ui.write(_(b'creating ssh peer from handshake results\n'))
4283 peer = sshpeer.makepeer(
4281 peer = sshpeer.makepeer(
4284 ui,
4282 ui,
4285 url,
4283 url,
4286 proc,
4284 proc,
4287 stdin,
4285 stdin,
4288 stdout,
4286 stdout,
4289 stderr,
4287 stderr,
4290 autoreadstderr=autoreadstderr,
4288 autoreadstderr=autoreadstderr,
4291 )
4289 )
4292
4290
4293 elif path:
4291 elif path:
4294 # We bypass hg.peer() so we can proxy the sockets.
4292 # We bypass hg.peer() so we can proxy the sockets.
4295 # TODO consider not doing this because we skip
4293 # TODO consider not doing this because we skip
4296 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4294 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4297 u = util.url(path)
4295 u = util.url(path)
4298 if u.scheme != b'http':
4296 if u.scheme != b'http':
4299 raise error.Abort(_(b'only http:// paths are currently supported'))
4297 raise error.Abort(_(b'only http:// paths are currently supported'))
4300
4298
4301 url, authinfo = u.authinfo()
4299 url, authinfo = u.authinfo()
4302 openerargs = {
4300 openerargs = {
4303 'useragent': b'Mercurial debugwireproto',
4301 'useragent': b'Mercurial debugwireproto',
4304 }
4302 }
4305
4303
4306 # Turn pipes/sockets into observers so we can log I/O.
4304 # Turn pipes/sockets into observers so we can log I/O.
4307 if ui.verbose:
4305 if ui.verbose:
4308 openerargs.update(
4306 openerargs.update(
4309 {
4307 {
4310 'loggingfh': ui,
4308 'loggingfh': ui,
4311 'loggingname': b's',
4309 'loggingname': b's',
4312 'loggingopts': {'logdata': True, 'logdataapis': False,},
4310 'loggingopts': {'logdata': True, 'logdataapis': False,},
4313 }
4311 }
4314 )
4312 )
4315
4313
4316 if ui.debugflag:
4314 if ui.debugflag:
4317 openerargs['loggingopts']['logdataapis'] = True
4315 openerargs['loggingopts']['logdataapis'] = True
4318
4316
4319 # Don't send default headers when in raw mode. This allows us to
4317 # Don't send default headers when in raw mode. This allows us to
4320 # bypass most of the behavior of our URL handling code so we can
4318 # bypass most of the behavior of our URL handling code so we can
4321 # have near complete control over what's sent on the wire.
4319 # have near complete control over what's sent on the wire.
4322 if opts[b'peer'] == b'raw':
4320 if opts[b'peer'] == b'raw':
4323 openerargs['sendaccept'] = False
4321 openerargs['sendaccept'] = False
4324
4322
4325 opener = urlmod.opener(ui, authinfo, **openerargs)
4323 opener = urlmod.opener(ui, authinfo, **openerargs)
4326
4324
4327 if opts[b'peer'] == b'http2':
4325 if opts[b'peer'] == b'http2':
4328 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4326 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4329 # We go through makepeer() because we need an API descriptor for
4327 # We go through makepeer() because we need an API descriptor for
4330 # the peer instance to be useful.
4328 # the peer instance to be useful.
4331 with ui.configoverride(
4329 with ui.configoverride(
4332 {(b'experimental', b'httppeer.advertise-v2'): True}
4330 {(b'experimental', b'httppeer.advertise-v2'): True}
4333 ):
4331 ):
4334 if opts[b'nologhandshake']:
4332 if opts[b'nologhandshake']:
4335 ui.pushbuffer()
4333 ui.pushbuffer()
4336
4334
4337 peer = httppeer.makepeer(ui, path, opener=opener)
4335 peer = httppeer.makepeer(ui, path, opener=opener)
4338
4336
4339 if opts[b'nologhandshake']:
4337 if opts[b'nologhandshake']:
4340 ui.popbuffer()
4338 ui.popbuffer()
4341
4339
4342 if not isinstance(peer, httppeer.httpv2peer):
4340 if not isinstance(peer, httppeer.httpv2peer):
4343 raise error.Abort(
4341 raise error.Abort(
4344 _(
4342 _(
4345 b'could not instantiate HTTP peer for '
4343 b'could not instantiate HTTP peer for '
4346 b'wire protocol version 2'
4344 b'wire protocol version 2'
4347 ),
4345 ),
4348 hint=_(
4346 hint=_(
4349 b'the server may not have the feature '
4347 b'the server may not have the feature '
4350 b'enabled or is not allowing this '
4348 b'enabled or is not allowing this '
4351 b'client version'
4349 b'client version'
4352 ),
4350 ),
4353 )
4351 )
4354
4352
4355 elif opts[b'peer'] == b'raw':
4353 elif opts[b'peer'] == b'raw':
4356 ui.write(_(b'using raw connection to peer\n'))
4354 ui.write(_(b'using raw connection to peer\n'))
4357 peer = None
4355 peer = None
4358 elif opts[b'peer']:
4356 elif opts[b'peer']:
4359 raise error.Abort(
4357 raise error.Abort(
4360 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4358 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4361 )
4359 )
4362 else:
4360 else:
4363 peer = httppeer.makepeer(ui, path, opener=opener)
4361 peer = httppeer.makepeer(ui, path, opener=opener)
4364
4362
4365 # We /could/ populate stdin/stdout with sock.makefile()...
4363 # We /could/ populate stdin/stdout with sock.makefile()...
4366 else:
4364 else:
4367 raise error.Abort(_(b'unsupported connection configuration'))
4365 raise error.Abort(_(b'unsupported connection configuration'))
4368
4366
4369 batchedcommands = None
4367 batchedcommands = None
4370
4368
4371 # Now perform actions based on the parsed wire language instructions.
4369 # Now perform actions based on the parsed wire language instructions.
4372 for action, lines in blocks:
4370 for action, lines in blocks:
4373 if action in (b'raw', b'raw+'):
4371 if action in (b'raw', b'raw+'):
4374 if not stdin:
4372 if not stdin:
4375 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4373 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4376
4374
4377 # Concatenate the data together.
4375 # Concatenate the data together.
4378 data = b''.join(l.lstrip() for l in lines)
4376 data = b''.join(l.lstrip() for l in lines)
4379 data = stringutil.unescapestr(data)
4377 data = stringutil.unescapestr(data)
4380 stdin.write(data)
4378 stdin.write(data)
4381
4379
4382 if action == b'raw+':
4380 if action == b'raw+':
4383 stdin.flush()
4381 stdin.flush()
4384 elif action == b'flush':
4382 elif action == b'flush':
4385 if not stdin:
4383 if not stdin:
4386 raise error.Abort(_(b'cannot call flush on this peer'))
4384 raise error.Abort(_(b'cannot call flush on this peer'))
4387 stdin.flush()
4385 stdin.flush()
4388 elif action.startswith(b'command'):
4386 elif action.startswith(b'command'):
4389 if not peer:
4387 if not peer:
4390 raise error.Abort(
4388 raise error.Abort(
4391 _(
4389 _(
4392 b'cannot send commands unless peer instance '
4390 b'cannot send commands unless peer instance '
4393 b'is available'
4391 b'is available'
4394 )
4392 )
4395 )
4393 )
4396
4394
4397 command = action.split(b' ', 1)[1]
4395 command = action.split(b' ', 1)[1]
4398
4396
4399 args = {}
4397 args = {}
4400 for line in lines:
4398 for line in lines:
4401 # We need to allow empty values.
4399 # We need to allow empty values.
4402 fields = line.lstrip().split(b' ', 1)
4400 fields = line.lstrip().split(b' ', 1)
4403 if len(fields) == 1:
4401 if len(fields) == 1:
4404 key = fields[0]
4402 key = fields[0]
4405 value = b''
4403 value = b''
4406 else:
4404 else:
4407 key, value = fields
4405 key, value = fields
4408
4406
4409 if value.startswith(b'eval:'):
4407 if value.startswith(b'eval:'):
4410 value = stringutil.evalpythonliteral(value[5:])
4408 value = stringutil.evalpythonliteral(value[5:])
4411 else:
4409 else:
4412 value = stringutil.unescapestr(value)
4410 value = stringutil.unescapestr(value)
4413
4411
4414 args[key] = value
4412 args[key] = value
4415
4413
4416 if batchedcommands is not None:
4414 if batchedcommands is not None:
4417 batchedcommands.append((command, args))
4415 batchedcommands.append((command, args))
4418 continue
4416 continue
4419
4417
4420 ui.status(_(b'sending %s command\n') % command)
4418 ui.status(_(b'sending %s command\n') % command)
4421
4419
4422 if b'PUSHFILE' in args:
4420 if b'PUSHFILE' in args:
4423 with open(args[b'PUSHFILE'], 'rb') as fh:
4421 with open(args[b'PUSHFILE'], 'rb') as fh:
4424 del args[b'PUSHFILE']
4422 del args[b'PUSHFILE']
4425 res, output = peer._callpush(
4423 res, output = peer._callpush(
4426 command, fh, **pycompat.strkwargs(args)
4424 command, fh, **pycompat.strkwargs(args)
4427 )
4425 )
4428 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4426 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4429 ui.status(
4427 ui.status(
4430 _(b'remote output: %s\n') % stringutil.escapestr(output)
4428 _(b'remote output: %s\n') % stringutil.escapestr(output)
4431 )
4429 )
4432 else:
4430 else:
4433 with peer.commandexecutor() as e:
4431 with peer.commandexecutor() as e:
4434 res = e.callcommand(command, args).result()
4432 res = e.callcommand(command, args).result()
4435
4433
4436 if isinstance(res, wireprotov2peer.commandresponse):
4434 if isinstance(res, wireprotov2peer.commandresponse):
4437 val = res.objects()
4435 val = res.objects()
4438 ui.status(
4436 ui.status(
4439 _(b'response: %s\n')
4437 _(b'response: %s\n')
4440 % stringutil.pprint(val, bprefix=True, indent=2)
4438 % stringutil.pprint(val, bprefix=True, indent=2)
4441 )
4439 )
4442 else:
4440 else:
4443 ui.status(
4441 ui.status(
4444 _(b'response: %s\n')
4442 _(b'response: %s\n')
4445 % stringutil.pprint(res, bprefix=True, indent=2)
4443 % stringutil.pprint(res, bprefix=True, indent=2)
4446 )
4444 )
4447
4445
4448 elif action == b'batchbegin':
4446 elif action == b'batchbegin':
4449 if batchedcommands is not None:
4447 if batchedcommands is not None:
4450 raise error.Abort(_(b'nested batchbegin not allowed'))
4448 raise error.Abort(_(b'nested batchbegin not allowed'))
4451
4449
4452 batchedcommands = []
4450 batchedcommands = []
4453 elif action == b'batchsubmit':
4451 elif action == b'batchsubmit':
4454 # There is a batching API we could go through. But it would be
4452 # There is a batching API we could go through. But it would be
4455 # difficult to normalize requests into function calls. It is easier
4453 # difficult to normalize requests into function calls. It is easier
4456 # to bypass this layer and normalize to commands + args.
4454 # to bypass this layer and normalize to commands + args.
4457 ui.status(
4455 ui.status(
4458 _(b'sending batch with %d sub-commands\n')
4456 _(b'sending batch with %d sub-commands\n')
4459 % len(batchedcommands)
4457 % len(batchedcommands)
4460 )
4458 )
4461 assert peer is not None
4459 assert peer is not None
4462 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4460 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4463 ui.status(
4461 ui.status(
4464 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4462 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4465 )
4463 )
4466
4464
4467 batchedcommands = None
4465 batchedcommands = None
4468
4466
4469 elif action.startswith(b'httprequest '):
4467 elif action.startswith(b'httprequest '):
4470 if not opener:
4468 if not opener:
4471 raise error.Abort(
4469 raise error.Abort(
4472 _(b'cannot use httprequest without an HTTP peer')
4470 _(b'cannot use httprequest without an HTTP peer')
4473 )
4471 )
4474
4472
4475 request = action.split(b' ', 2)
4473 request = action.split(b' ', 2)
4476 if len(request) != 3:
4474 if len(request) != 3:
4477 raise error.Abort(
4475 raise error.Abort(
4478 _(
4476 _(
4479 b'invalid httprequest: expected format is '
4477 b'invalid httprequest: expected format is '
4480 b'"httprequest <method> <path>'
4478 b'"httprequest <method> <path>'
4481 )
4479 )
4482 )
4480 )
4483
4481
4484 method, httppath = request[1:]
4482 method, httppath = request[1:]
4485 headers = {}
4483 headers = {}
4486 body = None
4484 body = None
4487 frames = []
4485 frames = []
4488 for line in lines:
4486 for line in lines:
4489 line = line.lstrip()
4487 line = line.lstrip()
4490 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4488 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4491 if m:
4489 if m:
4492 # Headers need to use native strings.
4490 # Headers need to use native strings.
4493 key = pycompat.strurl(m.group(1))
4491 key = pycompat.strurl(m.group(1))
4494 value = pycompat.strurl(m.group(2))
4492 value = pycompat.strurl(m.group(2))
4495 headers[key] = value
4493 headers[key] = value
4496 continue
4494 continue
4497
4495
4498 if line.startswith(b'BODYFILE '):
4496 if line.startswith(b'BODYFILE '):
4499 with open(line.split(b' ', 1), b'rb') as fh:
4497 with open(line.split(b' ', 1), b'rb') as fh:
4500 body = fh.read()
4498 body = fh.read()
4501 elif line.startswith(b'frame '):
4499 elif line.startswith(b'frame '):
4502 frame = wireprotoframing.makeframefromhumanstring(
4500 frame = wireprotoframing.makeframefromhumanstring(
4503 line[len(b'frame ') :]
4501 line[len(b'frame ') :]
4504 )
4502 )
4505
4503
4506 frames.append(frame)
4504 frames.append(frame)
4507 else:
4505 else:
4508 raise error.Abort(
4506 raise error.Abort(
4509 _(b'unknown argument to httprequest: %s') % line
4507 _(b'unknown argument to httprequest: %s') % line
4510 )
4508 )
4511
4509
4512 url = path + httppath
4510 url = path + httppath
4513
4511
4514 if frames:
4512 if frames:
4515 body = b''.join(bytes(f) for f in frames)
4513 body = b''.join(bytes(f) for f in frames)
4516
4514
4517 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4515 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4518
4516
4519 # urllib.Request insists on using has_data() as a proxy for
4517 # urllib.Request insists on using has_data() as a proxy for
4520 # determining the request method. Override that to use our
4518 # determining the request method. Override that to use our
4521 # explicitly requested method.
4519 # explicitly requested method.
4522 req.get_method = lambda: pycompat.sysstr(method)
4520 req.get_method = lambda: pycompat.sysstr(method)
4523
4521
4524 try:
4522 try:
4525 res = opener.open(req)
4523 res = opener.open(req)
4526 body = res.read()
4524 body = res.read()
4527 except util.urlerr.urlerror as e:
4525 except util.urlerr.urlerror as e:
4528 # read() method must be called, but only exists in Python 2
4526 # read() method must be called, but only exists in Python 2
4529 getattr(e, 'read', lambda: None)()
4527 getattr(e, 'read', lambda: None)()
4530 continue
4528 continue
4531
4529
4532 ct = res.headers.get('Content-Type')
4530 ct = res.headers.get('Content-Type')
4533 if ct == 'application/mercurial-cbor':
4531 if ct == 'application/mercurial-cbor':
4534 ui.write(
4532 ui.write(
4535 _(b'cbor> %s\n')
4533 _(b'cbor> %s\n')
4536 % stringutil.pprint(
4534 % stringutil.pprint(
4537 cborutil.decodeall(body), bprefix=True, indent=2
4535 cborutil.decodeall(body), bprefix=True, indent=2
4538 )
4536 )
4539 )
4537 )
4540
4538
4541 elif action == b'close':
4539 elif action == b'close':
4542 assert peer is not None
4540 assert peer is not None
4543 peer.close()
4541 peer.close()
4544 elif action == b'readavailable':
4542 elif action == b'readavailable':
4545 if not stdout or not stderr:
4543 if not stdout or not stderr:
4546 raise error.Abort(
4544 raise error.Abort(
4547 _(b'readavailable not available on this peer')
4545 _(b'readavailable not available on this peer')
4548 )
4546 )
4549
4547
4550 stdin.close()
4548 stdin.close()
4551 stdout.read()
4549 stdout.read()
4552 stderr.read()
4550 stderr.read()
4553
4551
4554 elif action == b'readline':
4552 elif action == b'readline':
4555 if not stdout:
4553 if not stdout:
4556 raise error.Abort(_(b'readline not available on this peer'))
4554 raise error.Abort(_(b'readline not available on this peer'))
4557 stdout.readline()
4555 stdout.readline()
4558 elif action == b'ereadline':
4556 elif action == b'ereadline':
4559 if not stderr:
4557 if not stderr:
4560 raise error.Abort(_(b'ereadline not available on this peer'))
4558 raise error.Abort(_(b'ereadline not available on this peer'))
4561 stderr.readline()
4559 stderr.readline()
4562 elif action.startswith(b'read '):
4560 elif action.startswith(b'read '):
4563 count = int(action.split(b' ', 1)[1])
4561 count = int(action.split(b' ', 1)[1])
4564 if not stdout:
4562 if not stdout:
4565 raise error.Abort(_(b'read not available on this peer'))
4563 raise error.Abort(_(b'read not available on this peer'))
4566 stdout.read(count)
4564 stdout.read(count)
4567 elif action.startswith(b'eread '):
4565 elif action.startswith(b'eread '):
4568 count = int(action.split(b' ', 1)[1])
4566 count = int(action.split(b' ', 1)[1])
4569 if not stderr:
4567 if not stderr:
4570 raise error.Abort(_(b'eread not available on this peer'))
4568 raise error.Abort(_(b'eread not available on this peer'))
4571 stderr.read(count)
4569 stderr.read(count)
4572 else:
4570 else:
4573 raise error.Abort(_(b'unknown action: %s') % action)
4571 raise error.Abort(_(b'unknown action: %s') % action)
4574
4572
4575 if batchedcommands is not None:
4573 if batchedcommands is not None:
4576 raise error.Abort(_(b'unclosed "batchbegin" request'))
4574 raise error.Abort(_(b'unclosed "batchbegin" request'))
4577
4575
4578 if peer:
4576 if peer:
4579 peer.close()
4577 peer.close()
4580
4578
4581 if proc:
4579 if proc:
4582 proc.kill()
4580 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now