##// END OF EJS Templates
debugdeltachain: document the possible values for deltatype...
marmoute -
r50117:e7d23c51 default
parent child Browse files
Show More
@@ -1,4941 +1,4947 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 constants as revlog_constants,
106 constants as revlog_constants,
107 deltas as deltautil,
107 deltas as deltautil,
108 nodemap,
108 nodemap,
109 rewrite,
109 rewrite,
110 sidedata,
110 sidedata,
111 )
111 )
112
112
# Convenience alias: lets debug commands release locks without spelling
# out lockmod.release at every call site.
release = lockmod.release

# Registration table for all debug* commands.  It is seeded with the
# commands registered by the strip extension module so that both sets of
# commands share a single table, then wrapped in a registrar so the
# @command decorators below add to it.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open that revlog directly,
        # so no repository is required at all.
        index, spec1, spec2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # Without an index file, fall back to the current repository's
        # changelog — which requires a repository to exist here.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        spec1, spec2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(lookup(spec1), lookup(spec2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
139
139
140
140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs path arguments are bytes throughout Mercurial (note the b'wb'
    # mode right next to it); a native str literal here would be a
    # str/bytes mix on Python 3, so use bytes literals for the file name.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
156
156
157
157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle (hg.openpath also understands URLs), parse its
    # header, and let the resulting bundle object apply itself.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
164
164
165
165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Building on top of existing history is only allowed with
    # --from-existing; the default contract is "empty repo".
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass; only used to size the progress bar and, with
    # --mergeable-file, the initial file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass: actually commit one changeset per 'n' element,
    # all inside a single transaction.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the most recently committed node (-1 = none yet)
        atbranch = b'default'  # branch applied to subsequent nodes
        nodeids = []  # nodeids[i] is the node committed for DAG id i
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' = new node; data is (id, list-of-parent-ids).
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" is a shared file whose lines are merged with
                    # a 3-way text merge at merge nodes, so merges have
                    # non-trivial (but conflict-free) file content.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # First node: start from the precomputed lines.
                        ml = initialmergedlines
                    # Mark this revision's own line so every rev touches
                    # a distinct part of the file.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale by every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One brand-new file per revision...
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # ...and at merges, carry over the second
                        # parent's nf* files so they aren't dropped.
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: supply content for the files this
                    # revision touches, None for everything else.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Translate DAG parent ids into commit nodes; negative or
                # missing parents mean a root changeset.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' = local tag on an already-committed node.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' = switch the named branch for subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # Local tags live outside history, in .hg/localtags.
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351
351
352
352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup ``gen``, one line per delta

    With ``all`` set, every delta of the changelog, the manifest log and
    each filelog is shown with all its fields; otherwise only changelog
    node hashes are printed.
    """
    pad = b' ' * indent
    if not all:
        # Terse mode: changelog node hashes only.  bundle2 files wrap the
        # changegroup in parts, so they need the bundle2 dumper instead.
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for data in gen.deltaiter():
            node = data[0]
            ui.write(b"%s%s\n" % (pad, hex(node)))
        return

    ui.writenoi18n(
        b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" % pad
    )

    def dumpchunks(named):
        # One section per revlog: a title line, then one line per delta.
        ui.write(b"\n%s%s\n" % (pad, named))
        for data in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = data
            ui.write(
                b"%s%s %s %s %s %s %d\n"
                % (
                    pad,
                    hex(node),
                    hex(p1),
                    hex(p2),
                    hex(cs),
                    hex(deltabase),
                    len(delta),
                )
            )

    gen.changelogheader()
    dumpchunks(b"changelog")
    gen.manifestheader()
    dumpchunks(b"manifest")
    # filelogheader() returns an empty dict once all filelogs are consumed.
    for chunkdata in iter(gen.filelogheader, {}):
        dumpchunks(chunkdata[b'filename'])
392
392
393
393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown encoding version: report it and bail out without trying
        # to decode individual markers.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        ui.write(msg % (pad, exc.version, len(data)))
        return
    msg = b"%sversion: %d (%d bytes)\n"
    ui.write(msg % (pad, version, len(data)))
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
    fm.end()
416
416
417
417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from 'data', one head per line

    Each line is ``<hex node> <phase name>``, prefixed by ``indent``
    spaces.  (The previous docstring was copy-pasted from the obsmarker
    dumper and described the wrong payload.)
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426
426
427
427
428 def _quasirepr(thing):
428 def _quasirepr(thing):
429 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
429 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
430 return b'{%s}' % (
430 return b'{%s}' % (
431 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
431 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
432 )
432 )
433 return pycompat.bytestr(repr(thing))
433 return pycompat.bytestr(repr(thing))
434
434
435
435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # --part-type acts as a filter; an empty list means "show everything".
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        quiet = ui.quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers' and not quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads' and not quiet:
            _debugphaseheads(ui, part, indent=4)
458
458
459
459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec short-circuits everything else: just report the
            # bundlespec string for this file.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # bundle2 files carry parts and need the dedicated dumper.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482
482
483
483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        # Bundle2 capabilities form a second, nested section.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        # Always tear down the peer connection, even on errors.
        peer.close()
503
503
504
504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # Recompute from the changeset itself instead of trusting what
        # was recorded at commit time.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the recorded file-change info from changelog sidedata, if
        # this revision has any.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for fname in sorted(files.touched):
        # Report the most specific category; plain "touched" is the
        # catch-all for files in no other set.
        if fname in files.added:
            action = b"added"
        elif fname in files.removed:
            action = b"removed"
        elif fname in files.merged:
            action = b"merged"
        elif fname in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if fname in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[fname]
        elif fname in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[fname]

        ui.write(template % (action, copy_parent, fname, copy_source))
554
554
555
555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    # dirstate.verify yields (format, arg...) tuples, one per problem.
    nerrors = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        nerrors += 1
    if nerrors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
569
569
570
570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; otherwise list raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583
583
584
584
def _debugdisplaycolor(ui):
    """write one line per available color, rendered with that color"""
    # Work on a copy so we can rebuild the style table without touching
    # the caller's ui.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.'):]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    # Names containing '_' (e.g. '_background') sort after the plain
    # names, keeping related entries grouped together.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601
601
602
602
def _debugdisplaystyle(ui):
    """list each configured style label together with its effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists line up in a column after the longest label.
    width = max(len(s) for s in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(name))))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(rendered)
        ui.write(b'\n')
616
616
617
617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlog files wholesale, so secret changesets
        # cannot be filtered out; warn rather than silently leaking them.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # Generate the v1 stream bundle and write its chunks to `fname`.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
639
639
640
640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A revlog index file was given: emit that revlog's DAG.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield a ('n', (rev, parents)) event per revision, plus an
            # ('l', ...) label event for each rev listed on the command line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No file given: emit the DAG of the repository changelog.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision number to its list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an annotation event whenever the branch changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Serialize the event stream into dagtext lines and print them.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
710
710
711
711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied and the first positional
    # argument is actually the revision.
    storage_flag = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if storage_flag:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
727
728
728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended additionally tries the more permissive date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # With a second argument, report whether the date matches the range.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747
747
748
748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    # Sparse read is optional; older/other storage objects may lack the
    # attribute entirely.
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Gather per-revision statistics: parents, sizes, delta type and the
        # full delta chain with its cumulative compressed size.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        if generaldelta:
            # With generaldelta the base can be any revision; classify the
            # delta by how its base relates to this revision (see the
            # ``deltatype`` keyword documentation above).
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta a revision is either a full snapshot or a
            # delta against the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Column headers for the default (plain) output mode.
    fm.plain(
        b'    rev  p1  p2      chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Number chains by unique base, in order of first appearance.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # lindist: on-disk span from the chain base to the end of this rev;
        # extradist: bytes in that span not belonging to this chain.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure how much data
            # would actually be fetched from disk and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
939
945
940
946
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket (metadata file) instead of
        # the individual entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # The deprecated --nodates flag overrides --dates when given.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) so equal mtimes stay deterministic
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # padded to the width of the strftime output below
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set: show 'lnk' instead of a permission triplet
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1028
1034
1029
1035
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 stores an ignore-pattern hash; print nothing for v1.
    if not repo.dirstate._use_dirstate_v2:
        return
    docket = repo.dirstate._map.docket
    # The SHA-1 hash (160 bits = 20 bytes) sits at the end of tree_metadata.
    hash_len = 20
    hash_bytes = docket.tree_metadata[-hash_len:]
    ui.write(binascii.hexlify(hash_bytes) + b'\n')
1044
1050
1045
1051
1046 @command(
1052 @command(
1047 b'debugdiscovery',
1053 b'debugdiscovery',
1048 [
1054 [
1049 (b'', b'old', None, _(b'use old-style discovery')),
1055 (b'', b'old', None, _(b'use old-style discovery')),
1050 (
1056 (
1051 b'',
1057 b'',
1052 b'nonheads',
1058 b'nonheads',
1053 None,
1059 None,
1054 _(b'use old-style discovery with non-heads included'),
1060 _(b'use old-style discovery with non-heads included'),
1055 ),
1061 ),
1056 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1062 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1057 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1063 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1058 (
1064 (
1059 b'',
1065 b'',
1060 b'local-as-revs',
1066 b'local-as-revs',
1061 b"",
1067 b"",
1062 b'treat local has having these revisions only',
1068 b'treat local has having these revisions only',
1063 ),
1069 ),
1064 (
1070 (
1065 b'',
1071 b'',
1066 b'remote-as-revs',
1072 b'remote-as-revs',
1067 b"",
1073 b"",
1068 b'use local as remote, with only these revisions',
1074 b'use local as remote, with only these revisions',
1069 ),
1075 ),
1070 ]
1076 ]
1071 + cmdutil.remoteopts
1077 + cmdutil.remoteopts
1072 + cmdutil.formatteropts,
1078 + cmdutil.formatteropts,
1073 _(b'[--rev REV] [OTHER]'),
1079 _(b'[--rev REV] [OTHER]'),
1074 )
1080 )
1075 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1081 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1076 """runs the changeset discovery protocol in isolation
1082 """runs the changeset discovery protocol in isolation
1077
1083
1078 The local peer can be "replaced" by a subset of the local repository by
1084 The local peer can be "replaced" by a subset of the local repository by
1079 using the `--local-as-revs` flag. In the same way, the usual `remote` peer can
1085 using the `--local-as-revs` flag. In the same way, the usual `remote` peer can
1080 be "replaced" by a subset of the local repository using the
1086 be "replaced" by a subset of the local repository using the
1081 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1087 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1082 discovery situations.
1088 discovery situations.
1083
1089
1084 The following developer oriented config are relevant for people playing with this command:
1090 The following developer oriented config are relevant for people playing with this command:
1085
1091
1086 * devel.discovery.exchange-heads=True
1092 * devel.discovery.exchange-heads=True
1087
1093
1088 If False, the discovery will not start with
1094 If False, the discovery will not start with
1089 remote head fetching and local head querying.
1095 remote head fetching and local head querying.
1090
1096
1091 * devel.discovery.grow-sample=True
1097 * devel.discovery.grow-sample=True
1092
1098
1093 If False, the sample size used in set discovery will not be increased
1099 If False, the sample size used in set discovery will not be increased
1094 through the process
1100 through the process
1095
1101
1096 * devel.discovery.grow-sample.dynamic=True
1102 * devel.discovery.grow-sample.dynamic=True
1097
1103
1098 When discovery.grow-sample.dynamic is True, the default, the sample size is
1104 When discovery.grow-sample.dynamic is True, the default, the sample size is
1099 adapted to the shape of the undecided set (it is set to the max of:
1105 adapted to the shape of the undecided set (it is set to the max of:
1100 <target-size>, len(roots(undecided)), len(heads(undecided)
1106 <target-size>, len(roots(undecided)), len(heads(undecided)
1101
1107
1102 * devel.discovery.grow-sample.rate=1.05
1108 * devel.discovery.grow-sample.rate=1.05
1103
1109
1104 the rate at which the sample grow
1110 the rate at which the sample grow
1105
1111
1106 * devel.discovery.randomize=True
1112 * devel.discovery.randomize=True
1107
1113
1108 If False, random sampling during discovery is deterministic. It is meant for
1114 If False, random sampling during discovery is deterministic. It is meant for
1109 integration tests.
1115 integration tests.
1110
1116
1111 * devel.discovery.sample-size=200
1117 * devel.discovery.sample-size=200
1112
1118
1113 Control the initial size of the discovery sample
1119 Control the initial size of the discovery sample
1114
1120
1115 * devel.discovery.sample-size.initial=100
1121 * devel.discovery.sample-size.initial=100
1116
1122
1117 Control the initial size of the discovery for initial change
1123 Control the initial size of the discovery for initial change
1118 """
1124 """
1119 opts = pycompat.byteskwargs(opts)
1125 opts = pycompat.byteskwargs(opts)
1120 unfi = repo.unfiltered()
1126 unfi = repo.unfiltered()
1121
1127
1122 # setup potential extra filtering
1128 # setup potential extra filtering
1123 local_revs = opts[b"local_as_revs"]
1129 local_revs = opts[b"local_as_revs"]
1124 remote_revs = opts[b"remote_as_revs"]
1130 remote_revs = opts[b"remote_as_revs"]
1125
1131
1126 # make sure tests are repeatable
1132 # make sure tests are repeatable
1127 random.seed(int(opts[b'seed']))
1133 random.seed(int(opts[b'seed']))
1128
1134
1129 if not remote_revs:
1135 if not remote_revs:
1130
1136
1131 remoteurl, branches = urlutil.get_unique_pull_path(
1137 remoteurl, branches = urlutil.get_unique_pull_path(
1132 b'debugdiscovery', repo, ui, remoteurl
1138 b'debugdiscovery', repo, ui, remoteurl
1133 )
1139 )
1134 remote = hg.peer(repo, opts, remoteurl)
1140 remote = hg.peer(repo, opts, remoteurl)
1135 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1141 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1136 else:
1142 else:
1137 branches = (None, [])
1143 branches = (None, [])
1138 remote_filtered_revs = logcmdutil.revrange(
1144 remote_filtered_revs = logcmdutil.revrange(
1139 unfi, [b"not (::(%s))" % remote_revs]
1145 unfi, [b"not (::(%s))" % remote_revs]
1140 )
1146 )
1141 remote_filtered_revs = frozenset(remote_filtered_revs)
1147 remote_filtered_revs = frozenset(remote_filtered_revs)
1142
1148
1143 def remote_func(x):
1149 def remote_func(x):
1144 return remote_filtered_revs
1150 return remote_filtered_revs
1145
1151
1146 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1152 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1147
1153
1148 remote = repo.peer()
1154 remote = repo.peer()
1149 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1155 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1150
1156
1151 if local_revs:
1157 if local_revs:
1152 local_filtered_revs = logcmdutil.revrange(
1158 local_filtered_revs = logcmdutil.revrange(
1153 unfi, [b"not (::(%s))" % local_revs]
1159 unfi, [b"not (::(%s))" % local_revs]
1154 )
1160 )
1155 local_filtered_revs = frozenset(local_filtered_revs)
1161 local_filtered_revs = frozenset(local_filtered_revs)
1156
1162
1157 def local_func(x):
1163 def local_func(x):
1158 return local_filtered_revs
1164 return local_filtered_revs
1159
1165
1160 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1166 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1161 repo = repo.filtered(b'debug-discovery-local-filter')
1167 repo = repo.filtered(b'debug-discovery-local-filter')
1162
1168
1163 data = {}
1169 data = {}
1164 if opts.get(b'old'):
1170 if opts.get(b'old'):
1165
1171
1166 def doit(pushedrevs, remoteheads, remote=remote):
1172 def doit(pushedrevs, remoteheads, remote=remote):
1167 if not util.safehasattr(remote, b'branches'):
1173 if not util.safehasattr(remote, b'branches'):
1168 # enable in-client legacy support
1174 # enable in-client legacy support
1169 remote = localrepo.locallegacypeer(remote.local())
1175 remote = localrepo.locallegacypeer(remote.local())
1170 common, _in, hds = treediscovery.findcommonincoming(
1176 common, _in, hds = treediscovery.findcommonincoming(
1171 repo, remote, force=True, audit=data
1177 repo, remote, force=True, audit=data
1172 )
1178 )
1173 common = set(common)
1179 common = set(common)
1174 if not opts.get(b'nonheads'):
1180 if not opts.get(b'nonheads'):
1175 ui.writenoi18n(
1181 ui.writenoi18n(
1176 b"unpruned common: %s\n"
1182 b"unpruned common: %s\n"
1177 % b" ".join(sorted(short(n) for n in common))
1183 % b" ".join(sorted(short(n) for n in common))
1178 )
1184 )
1179
1185
1180 clnode = repo.changelog.node
1186 clnode = repo.changelog.node
1181 common = repo.revs(b'heads(::%ln)', common)
1187 common = repo.revs(b'heads(::%ln)', common)
1182 common = {clnode(r) for r in common}
1188 common = {clnode(r) for r in common}
1183 return common, hds
1189 return common, hds
1184
1190
1185 else:
1191 else:
1186
1192
1187 def doit(pushedrevs, remoteheads, remote=remote):
1193 def doit(pushedrevs, remoteheads, remote=remote):
1188 nodes = None
1194 nodes = None
1189 if pushedrevs:
1195 if pushedrevs:
1190 revs = logcmdutil.revrange(repo, pushedrevs)
1196 revs = logcmdutil.revrange(repo, pushedrevs)
1191 nodes = [repo[r].node() for r in revs]
1197 nodes = [repo[r].node() for r in revs]
1192 common, any, hds = setdiscovery.findcommonheads(
1198 common, any, hds = setdiscovery.findcommonheads(
1193 ui, repo, remote, ancestorsof=nodes, audit=data
1199 ui, repo, remote, ancestorsof=nodes, audit=data
1194 )
1200 )
1195 return common, hds
1201 return common, hds
1196
1202
1197 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1203 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1198 localrevs = opts[b'rev']
1204 localrevs = opts[b'rev']
1199
1205
1200 fm = ui.formatter(b'debugdiscovery', opts)
1206 fm = ui.formatter(b'debugdiscovery', opts)
1201 if fm.strict_format:
1207 if fm.strict_format:
1202
1208
1203 @contextlib.contextmanager
1209 @contextlib.contextmanager
1204 def may_capture_output():
1210 def may_capture_output():
1205 ui.pushbuffer()
1211 ui.pushbuffer()
1206 yield
1212 yield
1207 data[b'output'] = ui.popbuffer()
1213 data[b'output'] = ui.popbuffer()
1208
1214
1209 else:
1215 else:
1210 may_capture_output = util.nullcontextmanager
1216 may_capture_output = util.nullcontextmanager
1211 with may_capture_output():
1217 with may_capture_output():
1212 with util.timedcm('debug-discovery') as t:
1218 with util.timedcm('debug-discovery') as t:
1213 common, hds = doit(localrevs, remoterevs)
1219 common, hds = doit(localrevs, remoterevs)
1214
1220
1215 # compute all statistics
1221 # compute all statistics
1216 heads_common = set(common)
1222 heads_common = set(common)
1217 heads_remote = set(hds)
1223 heads_remote = set(hds)
1218 heads_local = set(repo.heads())
1224 heads_local = set(repo.heads())
1219 # note: they cannot be a local or remote head that is in common and not
1225 # note: they cannot be a local or remote head that is in common and not
1220 # itself a head of common.
1226 # itself a head of common.
1221 heads_common_local = heads_common & heads_local
1227 heads_common_local = heads_common & heads_local
1222 heads_common_remote = heads_common & heads_remote
1228 heads_common_remote = heads_common & heads_remote
1223 heads_common_both = heads_common & heads_remote & heads_local
1229 heads_common_both = heads_common & heads_remote & heads_local
1224
1230
1225 all = repo.revs(b'all()')
1231 all = repo.revs(b'all()')
1226 common = repo.revs(b'::%ln', common)
1232 common = repo.revs(b'::%ln', common)
1227 roots_common = repo.revs(b'roots(::%ld)', common)
1233 roots_common = repo.revs(b'roots(::%ld)', common)
1228 missing = repo.revs(b'not ::%ld', common)
1234 missing = repo.revs(b'not ::%ld', common)
1229 heads_missing = repo.revs(b'heads(%ld)', missing)
1235 heads_missing = repo.revs(b'heads(%ld)', missing)
1230 roots_missing = repo.revs(b'roots(%ld)', missing)
1236 roots_missing = repo.revs(b'roots(%ld)', missing)
1231 assert len(common) + len(missing) == len(all)
1237 assert len(common) + len(missing) == len(all)
1232
1238
1233 initial_undecided = repo.revs(
1239 initial_undecided = repo.revs(
1234 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1240 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1235 )
1241 )
1236 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1242 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1237 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1243 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1238 common_initial_undecided = initial_undecided & common
1244 common_initial_undecided = initial_undecided & common
1239 missing_initial_undecided = initial_undecided & missing
1245 missing_initial_undecided = initial_undecided & missing
1240
1246
1241 data[b'elapsed'] = t.elapsed
1247 data[b'elapsed'] = t.elapsed
1242 data[b'nb-common-heads'] = len(heads_common)
1248 data[b'nb-common-heads'] = len(heads_common)
1243 data[b'nb-common-heads-local'] = len(heads_common_local)
1249 data[b'nb-common-heads-local'] = len(heads_common_local)
1244 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1250 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1245 data[b'nb-common-heads-both'] = len(heads_common_both)
1251 data[b'nb-common-heads-both'] = len(heads_common_both)
1246 data[b'nb-common-roots'] = len(roots_common)
1252 data[b'nb-common-roots'] = len(roots_common)
1247 data[b'nb-head-local'] = len(heads_local)
1253 data[b'nb-head-local'] = len(heads_local)
1248 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1254 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1249 data[b'nb-head-remote'] = len(heads_remote)
1255 data[b'nb-head-remote'] = len(heads_remote)
1250 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1256 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1251 heads_common_remote
1257 heads_common_remote
1252 )
1258 )
1253 data[b'nb-revs'] = len(all)
1259 data[b'nb-revs'] = len(all)
1254 data[b'nb-revs-common'] = len(common)
1260 data[b'nb-revs-common'] = len(common)
1255 data[b'nb-revs-missing'] = len(missing)
1261 data[b'nb-revs-missing'] = len(missing)
1256 data[b'nb-missing-heads'] = len(heads_missing)
1262 data[b'nb-missing-heads'] = len(heads_missing)
1257 data[b'nb-missing-roots'] = len(roots_missing)
1263 data[b'nb-missing-roots'] = len(roots_missing)
1258 data[b'nb-ini_und'] = len(initial_undecided)
1264 data[b'nb-ini_und'] = len(initial_undecided)
1259 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1265 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1260 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1266 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1261 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1267 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1262 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1268 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1263
1269
1264 fm.startitem()
1270 fm.startitem()
1265 fm.data(**pycompat.strkwargs(data))
1271 fm.data(**pycompat.strkwargs(data))
1266 # display discovery summary
1272 # display discovery summary
1267 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1273 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1268 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1274 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1269 fm.plain(b"queries: %(total-queries)9d\n" % data)
1275 fm.plain(b"queries: %(total-queries)9d\n" % data)
1270 fm.plain(b"heads summary:\n")
1276 fm.plain(b"heads summary:\n")
1271 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1277 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1272 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1278 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1273 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1279 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1274 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1280 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1275 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1281 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1276 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1282 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1277 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1283 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1278 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1284 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1279 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1285 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1280 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1286 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1281 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1287 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1282 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1288 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1283 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1289 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1284 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1290 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1285 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1291 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1286 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1292 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1287 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1293 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1288 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1294 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1289 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1295 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1290 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1296 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1291 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1297 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1292 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1298 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1293
1299
1294 if ui.verbose:
1300 if ui.verbose:
1295 fm.plain(
1301 fm.plain(
1296 b"common heads: %s\n"
1302 b"common heads: %s\n"
1297 % b" ".join(sorted(short(n) for n in heads_common))
1303 % b" ".join(sorted(short(n) for n in heads_common))
1298 )
1304 )
1299 fm.end()
1305 fm.end()
1300
1306
1301
1307
1302 _chunksize = 4 << 10
1308 _chunksize = 4 << 10
1303
1309
1304
1310
1305 @command(
1311 @command(
1306 b'debugdownload',
1312 b'debugdownload',
1307 [
1313 [
1308 (b'o', b'output', b'', _(b'path')),
1314 (b'o', b'output', b'', _(b'path')),
1309 ],
1315 ],
1310 optionalrepo=True,
1316 optionalrepo=True,
1311 )
1317 )
1312 def debugdownload(ui, repo, url, output=None, **opts):
1318 def debugdownload(ui, repo, url, output=None, **opts):
1313 """download a resource using Mercurial logic and config"""
1319 """download a resource using Mercurial logic and config"""
1314 fh = urlmod.open(ui, url, output)
1320 fh = urlmod.open(ui, url, output)
1315
1321
1316 dest = ui
1322 dest = ui
1317 if output:
1323 if output:
1318 dest = open(output, b"wb", _chunksize)
1324 dest = open(output, b"wb", _chunksize)
1319 try:
1325 try:
1320 data = fh.read(_chunksize)
1326 data = fh.read(_chunksize)
1321 while data:
1327 while data:
1322 dest.write(data)
1328 dest.write(data)
1323 data = fh.read(_chunksize)
1329 data = fh.read(_chunksize)
1324 finally:
1330 finally:
1325 if output:
1331 if output:
1326 dest.close()
1332 dest.close()
1327
1333
1328
1334
1329 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1335 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1330 def debugextensions(ui, repo, **opts):
1336 def debugextensions(ui, repo, **opts):
1331 '''show information about active extensions'''
1337 '''show information about active extensions'''
1332 opts = pycompat.byteskwargs(opts)
1338 opts = pycompat.byteskwargs(opts)
1333 exts = extensions.extensions(ui)
1339 exts = extensions.extensions(ui)
1334 hgver = util.version()
1340 hgver = util.version()
1335 fm = ui.formatter(b'debugextensions', opts)
1341 fm = ui.formatter(b'debugextensions', opts)
1336 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1342 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1337 isinternal = extensions.ismoduleinternal(extmod)
1343 isinternal = extensions.ismoduleinternal(extmod)
1338 extsource = None
1344 extsource = None
1339
1345
1340 if util.safehasattr(extmod, '__file__'):
1346 if util.safehasattr(extmod, '__file__'):
1341 extsource = pycompat.fsencode(extmod.__file__)
1347 extsource = pycompat.fsencode(extmod.__file__)
1342 elif getattr(sys, 'oxidized', False):
1348 elif getattr(sys, 'oxidized', False):
1343 extsource = pycompat.sysexecutable
1349 extsource = pycompat.sysexecutable
1344 if isinternal:
1350 if isinternal:
1345 exttestedwith = [] # never expose magic string to users
1351 exttestedwith = [] # never expose magic string to users
1346 else:
1352 else:
1347 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1353 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1348 extbuglink = getattr(extmod, 'buglink', None)
1354 extbuglink = getattr(extmod, 'buglink', None)
1349
1355
1350 fm.startitem()
1356 fm.startitem()
1351
1357
1352 if ui.quiet or ui.verbose:
1358 if ui.quiet or ui.verbose:
1353 fm.write(b'name', b'%s\n', extname)
1359 fm.write(b'name', b'%s\n', extname)
1354 else:
1360 else:
1355 fm.write(b'name', b'%s', extname)
1361 fm.write(b'name', b'%s', extname)
1356 if isinternal or hgver in exttestedwith:
1362 if isinternal or hgver in exttestedwith:
1357 fm.plain(b'\n')
1363 fm.plain(b'\n')
1358 elif not exttestedwith:
1364 elif not exttestedwith:
1359 fm.plain(_(b' (untested!)\n'))
1365 fm.plain(_(b' (untested!)\n'))
1360 else:
1366 else:
1361 lasttestedversion = exttestedwith[-1]
1367 lasttestedversion = exttestedwith[-1]
1362 fm.plain(b' (%s!)\n' % lasttestedversion)
1368 fm.plain(b' (%s!)\n' % lasttestedversion)
1363
1369
1364 fm.condwrite(
1370 fm.condwrite(
1365 ui.verbose and extsource,
1371 ui.verbose and extsource,
1366 b'source',
1372 b'source',
1367 _(b' location: %s\n'),
1373 _(b' location: %s\n'),
1368 extsource or b"",
1374 extsource or b"",
1369 )
1375 )
1370
1376
1371 if ui.verbose:
1377 if ui.verbose:
1372 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1378 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1373 fm.data(bundled=isinternal)
1379 fm.data(bundled=isinternal)
1374
1380
1375 fm.condwrite(
1381 fm.condwrite(
1376 ui.verbose and exttestedwith,
1382 ui.verbose and exttestedwith,
1377 b'testedwith',
1383 b'testedwith',
1378 _(b' tested with: %s\n'),
1384 _(b' tested with: %s\n'),
1379 fm.formatlist(exttestedwith, name=b'ver'),
1385 fm.formatlist(exttestedwith, name=b'ver'),
1380 )
1386 )
1381
1387
1382 fm.condwrite(
1388 fm.condwrite(
1383 ui.verbose and extbuglink,
1389 ui.verbose and extbuglink,
1384 b'buglink',
1390 b'buglink',
1385 _(b' bug reporting: %s\n'),
1391 _(b' bug reporting: %s\n'),
1386 extbuglink or b"",
1392 extbuglink or b"",
1387 )
1393 )
1388
1394
1389 fm.end()
1395 fm.end()
1390
1396
1391
1397
1392 @command(
1398 @command(
1393 b'debugfileset',
1399 b'debugfileset',
1394 [
1400 [
1395 (
1401 (
1396 b'r',
1402 b'r',
1397 b'rev',
1403 b'rev',
1398 b'',
1404 b'',
1399 _(b'apply the filespec on this revision'),
1405 _(b'apply the filespec on this revision'),
1400 _(b'REV'),
1406 _(b'REV'),
1401 ),
1407 ),
1402 (
1408 (
1403 b'',
1409 b'',
1404 b'all-files',
1410 b'all-files',
1405 False,
1411 False,
1406 _(b'test files from all revisions and working directory'),
1412 _(b'test files from all revisions and working directory'),
1407 ),
1413 ),
1408 (
1414 (
1409 b's',
1415 b's',
1410 b'show-matcher',
1416 b'show-matcher',
1411 None,
1417 None,
1412 _(b'print internal representation of matcher'),
1418 _(b'print internal representation of matcher'),
1413 ),
1419 ),
1414 (
1420 (
1415 b'p',
1421 b'p',
1416 b'show-stage',
1422 b'show-stage',
1417 [],
1423 [],
1418 _(b'print parsed tree at the given stage'),
1424 _(b'print parsed tree at the given stage'),
1419 _(b'NAME'),
1425 _(b'NAME'),
1420 ),
1426 ),
1421 ],
1427 ],
1422 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1428 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1423 )
1429 )
1424 def debugfileset(ui, repo, expr, **opts):
1430 def debugfileset(ui, repo, expr, **opts):
1425 '''parse and apply a fileset specification'''
1431 '''parse and apply a fileset specification'''
1426 from . import fileset
1432 from . import fileset
1427
1433
1428 fileset.symbols # force import of fileset so we have predicates to optimize
1434 fileset.symbols # force import of fileset so we have predicates to optimize
1429 opts = pycompat.byteskwargs(opts)
1435 opts = pycompat.byteskwargs(opts)
1430 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1436 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1431
1437
1432 stages = [
1438 stages = [
1433 (b'parsed', pycompat.identity),
1439 (b'parsed', pycompat.identity),
1434 (b'analyzed', filesetlang.analyze),
1440 (b'analyzed', filesetlang.analyze),
1435 (b'optimized', filesetlang.optimize),
1441 (b'optimized', filesetlang.optimize),
1436 ]
1442 ]
1437 stagenames = {n for n, f in stages}
1443 stagenames = {n for n, f in stages}
1438
1444
1439 showalways = set()
1445 showalways = set()
1440 if ui.verbose and not opts[b'show_stage']:
1446 if ui.verbose and not opts[b'show_stage']:
1441 # show parsed tree by --verbose (deprecated)
1447 # show parsed tree by --verbose (deprecated)
1442 showalways.add(b'parsed')
1448 showalways.add(b'parsed')
1443 if opts[b'show_stage'] == [b'all']:
1449 if opts[b'show_stage'] == [b'all']:
1444 showalways.update(stagenames)
1450 showalways.update(stagenames)
1445 else:
1451 else:
1446 for n in opts[b'show_stage']:
1452 for n in opts[b'show_stage']:
1447 if n not in stagenames:
1453 if n not in stagenames:
1448 raise error.Abort(_(b'invalid stage name: %s') % n)
1454 raise error.Abort(_(b'invalid stage name: %s') % n)
1449 showalways.update(opts[b'show_stage'])
1455 showalways.update(opts[b'show_stage'])
1450
1456
1451 tree = filesetlang.parse(expr)
1457 tree = filesetlang.parse(expr)
1452 for n, f in stages:
1458 for n, f in stages:
1453 tree = f(tree)
1459 tree = f(tree)
1454 if n in showalways:
1460 if n in showalways:
1455 if opts[b'show_stage'] or n != b'parsed':
1461 if opts[b'show_stage'] or n != b'parsed':
1456 ui.write(b"* %s:\n" % n)
1462 ui.write(b"* %s:\n" % n)
1457 ui.write(filesetlang.prettyformat(tree), b"\n")
1463 ui.write(filesetlang.prettyformat(tree), b"\n")
1458
1464
1459 files = set()
1465 files = set()
1460 if opts[b'all_files']:
1466 if opts[b'all_files']:
1461 for r in repo:
1467 for r in repo:
1462 c = repo[r]
1468 c = repo[r]
1463 files.update(c.files())
1469 files.update(c.files())
1464 files.update(c.substate)
1470 files.update(c.substate)
1465 if opts[b'all_files'] or ctx.rev() is None:
1471 if opts[b'all_files'] or ctx.rev() is None:
1466 wctx = repo[None]
1472 wctx = repo[None]
1467 files.update(
1473 files.update(
1468 repo.dirstate.walk(
1474 repo.dirstate.walk(
1469 scmutil.matchall(repo),
1475 scmutil.matchall(repo),
1470 subrepos=list(wctx.substate),
1476 subrepos=list(wctx.substate),
1471 unknown=True,
1477 unknown=True,
1472 ignored=True,
1478 ignored=True,
1473 )
1479 )
1474 )
1480 )
1475 files.update(wctx.substate)
1481 files.update(wctx.substate)
1476 else:
1482 else:
1477 files.update(ctx.files())
1483 files.update(ctx.files())
1478 files.update(ctx.substate)
1484 files.update(ctx.substate)
1479
1485
1480 m = ctx.matchfileset(repo.getcwd(), expr)
1486 m = ctx.matchfileset(repo.getcwd(), expr)
1481 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1487 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1482 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1488 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1483 for f in sorted(files):
1489 for f in sorted(files):
1484 if not m(f):
1490 if not m(f):
1485 continue
1491 continue
1486 ui.write(b"%s\n" % f)
1492 ui.write(b"%s\n" % f)
1487
1493
1488
1494
1489 @command(
1495 @command(
1490 b"debug-repair-issue6528",
1496 b"debug-repair-issue6528",
1491 [
1497 [
1492 (
1498 (
1493 b'',
1499 b'',
1494 b'to-report',
1500 b'to-report',
1495 b'',
1501 b'',
1496 _(b'build a report of affected revisions to this file'),
1502 _(b'build a report of affected revisions to this file'),
1497 _(b'FILE'),
1503 _(b'FILE'),
1498 ),
1504 ),
1499 (
1505 (
1500 b'',
1506 b'',
1501 b'from-report',
1507 b'from-report',
1502 b'',
1508 b'',
1503 _(b'repair revisions listed in this report file'),
1509 _(b'repair revisions listed in this report file'),
1504 _(b'FILE'),
1510 _(b'FILE'),
1505 ),
1511 ),
1506 (
1512 (
1507 b'',
1513 b'',
1508 b'paranoid',
1514 b'paranoid',
1509 False,
1515 False,
1510 _(b'check that both detection methods do the same thing'),
1516 _(b'check that both detection methods do the same thing'),
1511 ),
1517 ),
1512 ]
1518 ]
1513 + cmdutil.dryrunopts,
1519 + cmdutil.dryrunopts,
1514 )
1520 )
1515 def debug_repair_issue6528(ui, repo, **opts):
1521 def debug_repair_issue6528(ui, repo, **opts):
1516 """find affected revisions and repair them. See issue6528 for more details.
1522 """find affected revisions and repair them. See issue6528 for more details.
1517
1523
1518 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1524 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1519 computation of affected revisions for a given repository across clones.
1525 computation of affected revisions for a given repository across clones.
1520 The report format is line-based (with empty lines ignored):
1526 The report format is line-based (with empty lines ignored):
1521
1527
1522 ```
1528 ```
1523 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1529 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1524 ```
1530 ```
1525
1531
1526 There can be multiple broken revisions per filelog, they are separated by
1532 There can be multiple broken revisions per filelog, they are separated by
1527 a comma with no spaces. The only space is between the revision(s) and the
1533 a comma with no spaces. The only space is between the revision(s) and the
1528 filename.
1534 filename.
1529
1535
1530 Note that this does *not* mean that this repairs future affected revisions,
1536 Note that this does *not* mean that this repairs future affected revisions,
1531 that needs a separate fix at the exchange level that was introduced in
1537 that needs a separate fix at the exchange level that was introduced in
1532 Mercurial 5.9.1.
1538 Mercurial 5.9.1.
1533
1539
1534 There is a `--paranoid` flag to test that the fast implementation is correct
1540 There is a `--paranoid` flag to test that the fast implementation is correct
1535 by checking it against the slow implementation. Since this matter is quite
1541 by checking it against the slow implementation. Since this matter is quite
1536 urgent and testing every edge-case is probably quite costly, we use this
1542 urgent and testing every edge-case is probably quite costly, we use this
1537 method to test on large repositories as a fuzzing method of sorts.
1543 method to test on large repositories as a fuzzing method of sorts.
1538 """
1544 """
1539 cmdutil.check_incompatible_arguments(
1545 cmdutil.check_incompatible_arguments(
1540 opts, 'to_report', ['from_report', 'dry_run']
1546 opts, 'to_report', ['from_report', 'dry_run']
1541 )
1547 )
1542 dry_run = opts.get('dry_run')
1548 dry_run = opts.get('dry_run')
1543 to_report = opts.get('to_report')
1549 to_report = opts.get('to_report')
1544 from_report = opts.get('from_report')
1550 from_report = opts.get('from_report')
1545 paranoid = opts.get('paranoid')
1551 paranoid = opts.get('paranoid')
1546 # TODO maybe add filelog pattern and revision pattern parameters to help
1552 # TODO maybe add filelog pattern and revision pattern parameters to help
1547 # narrow down the search for users that know what they're looking for?
1553 # narrow down the search for users that know what they're looking for?
1548
1554
1549 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1555 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1550 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1556 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1551 raise error.Abort(_(msg))
1557 raise error.Abort(_(msg))
1552
1558
1553 rewrite.repair_issue6528(
1559 rewrite.repair_issue6528(
1554 ui,
1560 ui,
1555 repo,
1561 repo,
1556 dry_run=dry_run,
1562 dry_run=dry_run,
1557 to_report=to_report,
1563 to_report=to_report,
1558 from_report=from_report,
1564 from_report=from_report,
1559 paranoid=paranoid,
1565 paranoid=paranoid,
1560 )
1566 )
1561
1567
1562
1568
1563 @command(b'debugformat', [] + cmdutil.formatteropts)
1569 @command(b'debugformat', [] + cmdutil.formatteropts)
1564 def debugformat(ui, repo, **opts):
1570 def debugformat(ui, repo, **opts):
1565 """display format information about the current repository
1571 """display format information about the current repository
1566
1572
1567 Use --verbose to get extra information about current config value and
1573 Use --verbose to get extra information about current config value and
1568 Mercurial default."""
1574 Mercurial default."""
1569 opts = pycompat.byteskwargs(opts)
1575 opts = pycompat.byteskwargs(opts)
1570 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1576 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1571 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1577 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1572
1578
1573 def makeformatname(name):
1579 def makeformatname(name):
1574 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1580 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1575
1581
1576 fm = ui.formatter(b'debugformat', opts)
1582 fm = ui.formatter(b'debugformat', opts)
1577 if fm.isplain():
1583 if fm.isplain():
1578
1584
1579 def formatvalue(value):
1585 def formatvalue(value):
1580 if util.safehasattr(value, b'startswith'):
1586 if util.safehasattr(value, b'startswith'):
1581 return value
1587 return value
1582 if value:
1588 if value:
1583 return b'yes'
1589 return b'yes'
1584 else:
1590 else:
1585 return b'no'
1591 return b'no'
1586
1592
1587 else:
1593 else:
1588 formatvalue = pycompat.identity
1594 formatvalue = pycompat.identity
1589
1595
1590 fm.plain(b'format-variant')
1596 fm.plain(b'format-variant')
1591 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1597 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1592 fm.plain(b' repo')
1598 fm.plain(b' repo')
1593 if ui.verbose:
1599 if ui.verbose:
1594 fm.plain(b' config default')
1600 fm.plain(b' config default')
1595 fm.plain(b'\n')
1601 fm.plain(b'\n')
1596 for fv in upgrade.allformatvariant:
1602 for fv in upgrade.allformatvariant:
1597 fm.startitem()
1603 fm.startitem()
1598 repovalue = fv.fromrepo(repo)
1604 repovalue = fv.fromrepo(repo)
1599 configvalue = fv.fromconfig(repo)
1605 configvalue = fv.fromconfig(repo)
1600
1606
1601 if repovalue != configvalue:
1607 if repovalue != configvalue:
1602 namelabel = b'formatvariant.name.mismatchconfig'
1608 namelabel = b'formatvariant.name.mismatchconfig'
1603 repolabel = b'formatvariant.repo.mismatchconfig'
1609 repolabel = b'formatvariant.repo.mismatchconfig'
1604 elif repovalue != fv.default:
1610 elif repovalue != fv.default:
1605 namelabel = b'formatvariant.name.mismatchdefault'
1611 namelabel = b'formatvariant.name.mismatchdefault'
1606 repolabel = b'formatvariant.repo.mismatchdefault'
1612 repolabel = b'formatvariant.repo.mismatchdefault'
1607 else:
1613 else:
1608 namelabel = b'formatvariant.name.uptodate'
1614 namelabel = b'formatvariant.name.uptodate'
1609 repolabel = b'formatvariant.repo.uptodate'
1615 repolabel = b'formatvariant.repo.uptodate'
1610
1616
1611 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1617 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1612 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1618 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1613 if fv.default != configvalue:
1619 if fv.default != configvalue:
1614 configlabel = b'formatvariant.config.special'
1620 configlabel = b'formatvariant.config.special'
1615 else:
1621 else:
1616 configlabel = b'formatvariant.config.default'
1622 configlabel = b'formatvariant.config.default'
1617 fm.condwrite(
1623 fm.condwrite(
1618 ui.verbose,
1624 ui.verbose,
1619 b'config',
1625 b'config',
1620 b' %6s',
1626 b' %6s',
1621 formatvalue(configvalue),
1627 formatvalue(configvalue),
1622 label=configlabel,
1628 label=configlabel,
1623 )
1629 )
1624 fm.condwrite(
1630 fm.condwrite(
1625 ui.verbose,
1631 ui.verbose,
1626 b'default',
1632 b'default',
1627 b' %7s',
1633 b' %7s',
1628 formatvalue(fv.default),
1634 formatvalue(fv.default),
1629 label=b'formatvariant.default',
1635 label=b'formatvariant.default',
1630 )
1636 )
1631 fm.plain(b'\n')
1637 fm.plain(b'\n')
1632 fm.end()
1638 fm.end()
1633
1639
1634
1640
1635 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1641 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1636 def debugfsinfo(ui, path=b"."):
1642 def debugfsinfo(ui, path=b"."):
1637 """show information detected about current filesystem"""
1643 """show information detected about current filesystem"""
1638 ui.writenoi18n(b'path: %s\n' % path)
1644 ui.writenoi18n(b'path: %s\n' % path)
1639 ui.writenoi18n(
1645 ui.writenoi18n(
1640 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1646 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1641 )
1647 )
1642 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1648 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1643 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1649 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1644 ui.writenoi18n(
1650 ui.writenoi18n(
1645 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1651 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1646 )
1652 )
1647 ui.writenoi18n(
1653 ui.writenoi18n(
1648 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1654 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1649 )
1655 )
1650 casesensitive = b'(unknown)'
1656 casesensitive = b'(unknown)'
1651 try:
1657 try:
1652 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1658 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1653 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1659 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1654 except OSError:
1660 except OSError:
1655 pass
1661 pass
1656 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1662 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1657
1663
1658
1664
1659 @command(
1665 @command(
1660 b'debuggetbundle',
1666 b'debuggetbundle',
1661 [
1667 [
1662 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1668 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1663 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1669 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1664 (
1670 (
1665 b't',
1671 b't',
1666 b'type',
1672 b'type',
1667 b'bzip2',
1673 b'bzip2',
1668 _(b'bundle compression type to use'),
1674 _(b'bundle compression type to use'),
1669 _(b'TYPE'),
1675 _(b'TYPE'),
1670 ),
1676 ),
1671 ],
1677 ],
1672 _(b'REPO FILE [-H|-C ID]...'),
1678 _(b'REPO FILE [-H|-C ID]...'),
1673 norepo=True,
1679 norepo=True,
1674 )
1680 )
1675 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1681 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1676 """retrieves a bundle from a repo
1682 """retrieves a bundle from a repo
1677
1683
1678 Every ID must be a full-length hex node id string. Saves the bundle to the
1684 Every ID must be a full-length hex node id string. Saves the bundle to the
1679 given file.
1685 given file.
1680 """
1686 """
1681 opts = pycompat.byteskwargs(opts)
1687 opts = pycompat.byteskwargs(opts)
1682 repo = hg.peer(ui, opts, repopath)
1688 repo = hg.peer(ui, opts, repopath)
1683 if not repo.capable(b'getbundle'):
1689 if not repo.capable(b'getbundle'):
1684 raise error.Abort(b"getbundle() not supported by target repository")
1690 raise error.Abort(b"getbundle() not supported by target repository")
1685 args = {}
1691 args = {}
1686 if common:
1692 if common:
1687 args['common'] = [bin(s) for s in common]
1693 args['common'] = [bin(s) for s in common]
1688 if head:
1694 if head:
1689 args['heads'] = [bin(s) for s in head]
1695 args['heads'] = [bin(s) for s in head]
1690 # TODO: get desired bundlecaps from command line.
1696 # TODO: get desired bundlecaps from command line.
1691 args['bundlecaps'] = None
1697 args['bundlecaps'] = None
1692 bundle = repo.getbundle(b'debug', **args)
1698 bundle = repo.getbundle(b'debug', **args)
1693
1699
1694 bundletype = opts.get(b'type', b'bzip2').lower()
1700 bundletype = opts.get(b'type', b'bzip2').lower()
1695 btypes = {
1701 btypes = {
1696 b'none': b'HG10UN',
1702 b'none': b'HG10UN',
1697 b'bzip2': b'HG10BZ',
1703 b'bzip2': b'HG10BZ',
1698 b'gzip': b'HG10GZ',
1704 b'gzip': b'HG10GZ',
1699 b'bundle2': b'HG20',
1705 b'bundle2': b'HG20',
1700 }
1706 }
1701 bundletype = btypes.get(bundletype)
1707 bundletype = btypes.get(bundletype)
1702 if bundletype not in bundle2.bundletypes:
1708 if bundletype not in bundle2.bundletypes:
1703 raise error.Abort(_(b'unknown bundle type specified with --type'))
1709 raise error.Abort(_(b'unknown bundle type specified with --type'))
1704 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1710 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1705
1711
1706
1712
1707 @command(b'debugignore', [], b'[FILE]')
1713 @command(b'debugignore', [], b'[FILE]')
1708 def debugignore(ui, repo, *files, **opts):
1714 def debugignore(ui, repo, *files, **opts):
1709 """display the combined ignore pattern and information about ignored files
1715 """display the combined ignore pattern and information about ignored files
1710
1716
1711 With no argument display the combined ignore pattern.
1717 With no argument display the combined ignore pattern.
1712
1718
1713 Given space separated file names, shows if the given file is ignored and
1719 Given space separated file names, shows if the given file is ignored and
1714 if so, show the ignore rule (file and line number) that matched it.
1720 if so, show the ignore rule (file and line number) that matched it.
1715 """
1721 """
1716 ignore = repo.dirstate._ignore
1722 ignore = repo.dirstate._ignore
1717 if not files:
1723 if not files:
1718 # Show all the patterns
1724 # Show all the patterns
1719 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1725 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1720 else:
1726 else:
1721 m = scmutil.match(repo[None], pats=files)
1727 m = scmutil.match(repo[None], pats=files)
1722 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1728 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1723 for f in m.files():
1729 for f in m.files():
1724 nf = util.normpath(f)
1730 nf = util.normpath(f)
1725 ignored = None
1731 ignored = None
1726 ignoredata = None
1732 ignoredata = None
1727 if nf != b'.':
1733 if nf != b'.':
1728 if ignore(nf):
1734 if ignore(nf):
1729 ignored = nf
1735 ignored = nf
1730 ignoredata = repo.dirstate._ignorefileandline(nf)
1736 ignoredata = repo.dirstate._ignorefileandline(nf)
1731 else:
1737 else:
1732 for p in pathutil.finddirs(nf):
1738 for p in pathutil.finddirs(nf):
1733 if ignore(p):
1739 if ignore(p):
1734 ignored = p
1740 ignored = p
1735 ignoredata = repo.dirstate._ignorefileandline(p)
1741 ignoredata = repo.dirstate._ignorefileandline(p)
1736 break
1742 break
1737 if ignored:
1743 if ignored:
1738 if ignored == nf:
1744 if ignored == nf:
1739 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1745 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1740 else:
1746 else:
1741 ui.write(
1747 ui.write(
1742 _(
1748 _(
1743 b"%s is ignored because of "
1749 b"%s is ignored because of "
1744 b"containing directory %s\n"
1750 b"containing directory %s\n"
1745 )
1751 )
1746 % (uipathfn(f), ignored)
1752 % (uipathfn(f), ignored)
1747 )
1753 )
1748 ignorefile, lineno, line = ignoredata
1754 ignorefile, lineno, line = ignoredata
1749 ui.write(
1755 ui.write(
1750 _(b"(ignore rule in %s, line %d: '%s')\n")
1756 _(b"(ignore rule in %s, line %d: '%s')\n")
1751 % (ignorefile, lineno, line)
1757 % (ignorefile, lineno, line)
1752 )
1758 )
1753 else:
1759 else:
1754 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1760 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1755
1761
1756
1762
1757 @command(
1763 @command(
1758 b'debugindex',
1764 b'debugindex',
1759 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1765 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1760 _(b'-c|-m|FILE'),
1766 _(b'-c|-m|FILE'),
1761 )
1767 )
1762 def debugindex(ui, repo, file_=None, **opts):
1768 def debugindex(ui, repo, file_=None, **opts):
1763 """dump index data for a storage primitive"""
1769 """dump index data for a storage primitive"""
1764 opts = pycompat.byteskwargs(opts)
1770 opts = pycompat.byteskwargs(opts)
1765 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1771 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1766
1772
1767 if ui.debugflag:
1773 if ui.debugflag:
1768 shortfn = hex
1774 shortfn = hex
1769 else:
1775 else:
1770 shortfn = short
1776 shortfn = short
1771
1777
1772 idlen = 12
1778 idlen = 12
1773 for i in store:
1779 for i in store:
1774 idlen = len(shortfn(store.node(i)))
1780 idlen = len(shortfn(store.node(i)))
1775 break
1781 break
1776
1782
1777 fm = ui.formatter(b'debugindex', opts)
1783 fm = ui.formatter(b'debugindex', opts)
1778 fm.plain(
1784 fm.plain(
1779 b' rev linkrev %s %s p2\n'
1785 b' rev linkrev %s %s p2\n'
1780 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1786 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1781 )
1787 )
1782
1788
1783 for rev in store:
1789 for rev in store:
1784 node = store.node(rev)
1790 node = store.node(rev)
1785 parents = store.parents(node)
1791 parents = store.parents(node)
1786
1792
1787 fm.startitem()
1793 fm.startitem()
1788 fm.write(b'rev', b'%6d ', rev)
1794 fm.write(b'rev', b'%6d ', rev)
1789 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1795 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1790 fm.write(b'node', b'%s ', shortfn(node))
1796 fm.write(b'node', b'%s ', shortfn(node))
1791 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1797 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1792 fm.write(b'p2', b'%s', shortfn(parents[1]))
1798 fm.write(b'p2', b'%s', shortfn(parents[1]))
1793 fm.plain(b'\n')
1799 fm.plain(b'\n')
1794
1800
1795 fm.end()
1801 fm.end()
1796
1802
1797
1803
1798 @command(
1804 @command(
1799 b'debugindexdot',
1805 b'debugindexdot',
1800 cmdutil.debugrevlogopts,
1806 cmdutil.debugrevlogopts,
1801 _(b'-c|-m|FILE'),
1807 _(b'-c|-m|FILE'),
1802 optionalrepo=True,
1808 optionalrepo=True,
1803 )
1809 )
1804 def debugindexdot(ui, repo, file_=None, **opts):
1810 def debugindexdot(ui, repo, file_=None, **opts):
1805 """dump an index DAG as a graphviz dot file"""
1811 """dump an index DAG as a graphviz dot file"""
1806 opts = pycompat.byteskwargs(opts)
1812 opts = pycompat.byteskwargs(opts)
1807 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1813 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1808 ui.writenoi18n(b"digraph G {\n")
1814 ui.writenoi18n(b"digraph G {\n")
1809 for i in r:
1815 for i in r:
1810 node = r.node(i)
1816 node = r.node(i)
1811 pp = r.parents(node)
1817 pp = r.parents(node)
1812 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1818 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1813 if pp[1] != repo.nullid:
1819 if pp[1] != repo.nullid:
1814 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1820 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1815 ui.write(b"}\n")
1821 ui.write(b"}\n")
1816
1822
1817
1823
1818 @command(b'debugindexstats', [])
1824 @command(b'debugindexstats', [])
1819 def debugindexstats(ui, repo):
1825 def debugindexstats(ui, repo):
1820 """show stats related to the changelog index"""
1826 """show stats related to the changelog index"""
1821 repo.changelog.shortest(repo.nullid, 1)
1827 repo.changelog.shortest(repo.nullid, 1)
1822 index = repo.changelog.index
1828 index = repo.changelog.index
1823 if not util.safehasattr(index, b'stats'):
1829 if not util.safehasattr(index, b'stats'):
1824 raise error.Abort(_(b'debugindexstats only works with native code'))
1830 raise error.Abort(_(b'debugindexstats only works with native code'))
1825 for k, v in sorted(index.stats().items()):
1831 for k, v in sorted(index.stats().items()):
1826 ui.write(b'%s: %d\n' % (k, v))
1832 ui.write(b'%s: %d\n' % (k, v))
1827
1833
1828
1834
1829 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1835 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1830 def debuginstall(ui, **opts):
1836 def debuginstall(ui, **opts):
1831 """test Mercurial installation
1837 """test Mercurial installation
1832
1838
1833 Returns 0 on success.
1839 Returns 0 on success.
1834 """
1840 """
1835 opts = pycompat.byteskwargs(opts)
1841 opts = pycompat.byteskwargs(opts)
1836
1842
1837 problems = 0
1843 problems = 0
1838
1844
1839 fm = ui.formatter(b'debuginstall', opts)
1845 fm = ui.formatter(b'debuginstall', opts)
1840 fm.startitem()
1846 fm.startitem()
1841
1847
1842 # encoding might be unknown or wrong. don't translate these messages.
1848 # encoding might be unknown or wrong. don't translate these messages.
1843 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1849 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1844 err = None
1850 err = None
1845 try:
1851 try:
1846 codecs.lookup(pycompat.sysstr(encoding.encoding))
1852 codecs.lookup(pycompat.sysstr(encoding.encoding))
1847 except LookupError as inst:
1853 except LookupError as inst:
1848 err = stringutil.forcebytestr(inst)
1854 err = stringutil.forcebytestr(inst)
1849 problems += 1
1855 problems += 1
1850 fm.condwrite(
1856 fm.condwrite(
1851 err,
1857 err,
1852 b'encodingerror',
1858 b'encodingerror',
1853 b" %s\n (check that your locale is properly set)\n",
1859 b" %s\n (check that your locale is properly set)\n",
1854 err,
1860 err,
1855 )
1861 )
1856
1862
1857 # Python
1863 # Python
1858 pythonlib = None
1864 pythonlib = None
1859 if util.safehasattr(os, '__file__'):
1865 if util.safehasattr(os, '__file__'):
1860 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1866 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1861 elif getattr(sys, 'oxidized', False):
1867 elif getattr(sys, 'oxidized', False):
1862 pythonlib = pycompat.sysexecutable
1868 pythonlib = pycompat.sysexecutable
1863
1869
1864 fm.write(
1870 fm.write(
1865 b'pythonexe',
1871 b'pythonexe',
1866 _(b"checking Python executable (%s)\n"),
1872 _(b"checking Python executable (%s)\n"),
1867 pycompat.sysexecutable or _(b"unknown"),
1873 pycompat.sysexecutable or _(b"unknown"),
1868 )
1874 )
1869 fm.write(
1875 fm.write(
1870 b'pythonimplementation',
1876 b'pythonimplementation',
1871 _(b"checking Python implementation (%s)\n"),
1877 _(b"checking Python implementation (%s)\n"),
1872 pycompat.sysbytes(platform.python_implementation()),
1878 pycompat.sysbytes(platform.python_implementation()),
1873 )
1879 )
1874 fm.write(
1880 fm.write(
1875 b'pythonver',
1881 b'pythonver',
1876 _(b"checking Python version (%s)\n"),
1882 _(b"checking Python version (%s)\n"),
1877 (b"%d.%d.%d" % sys.version_info[:3]),
1883 (b"%d.%d.%d" % sys.version_info[:3]),
1878 )
1884 )
1879 fm.write(
1885 fm.write(
1880 b'pythonlib',
1886 b'pythonlib',
1881 _(b"checking Python lib (%s)...\n"),
1887 _(b"checking Python lib (%s)...\n"),
1882 pythonlib or _(b"unknown"),
1888 pythonlib or _(b"unknown"),
1883 )
1889 )
1884
1890
1885 try:
1891 try:
1886 from . import rustext # pytype: disable=import-error
1892 from . import rustext # pytype: disable=import-error
1887
1893
1888 rustext.__doc__ # trigger lazy import
1894 rustext.__doc__ # trigger lazy import
1889 except ImportError:
1895 except ImportError:
1890 rustext = None
1896 rustext = None
1891
1897
1892 security = set(sslutil.supportedprotocols)
1898 security = set(sslutil.supportedprotocols)
1893 if sslutil.hassni:
1899 if sslutil.hassni:
1894 security.add(b'sni')
1900 security.add(b'sni')
1895
1901
1896 fm.write(
1902 fm.write(
1897 b'pythonsecurity',
1903 b'pythonsecurity',
1898 _(b"checking Python security support (%s)\n"),
1904 _(b"checking Python security support (%s)\n"),
1899 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1905 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1900 )
1906 )
1901
1907
1902 # These are warnings, not errors. So don't increment problem count. This
1908 # These are warnings, not errors. So don't increment problem count. This
1903 # may change in the future.
1909 # may change in the future.
1904 if b'tls1.2' not in security:
1910 if b'tls1.2' not in security:
1905 fm.plain(
1911 fm.plain(
1906 _(
1912 _(
1907 b' TLS 1.2 not supported by Python install; '
1913 b' TLS 1.2 not supported by Python install; '
1908 b'network connections lack modern security\n'
1914 b'network connections lack modern security\n'
1909 )
1915 )
1910 )
1916 )
1911 if b'sni' not in security:
1917 if b'sni' not in security:
1912 fm.plain(
1918 fm.plain(
1913 _(
1919 _(
1914 b' SNI not supported by Python install; may have '
1920 b' SNI not supported by Python install; may have '
1915 b'connectivity issues with some servers\n'
1921 b'connectivity issues with some servers\n'
1916 )
1922 )
1917 )
1923 )
1918
1924
1919 fm.plain(
1925 fm.plain(
1920 _(
1926 _(
1921 b"checking Rust extensions (%s)\n"
1927 b"checking Rust extensions (%s)\n"
1922 % (b'missing' if rustext is None else b'installed')
1928 % (b'missing' if rustext is None else b'installed')
1923 ),
1929 ),
1924 )
1930 )
1925
1931
1926 # TODO print CA cert info
1932 # TODO print CA cert info
1927
1933
1928 # hg version
1934 # hg version
1929 hgver = util.version()
1935 hgver = util.version()
1930 fm.write(
1936 fm.write(
1931 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1937 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1932 )
1938 )
1933 fm.write(
1939 fm.write(
1934 b'hgverextra',
1940 b'hgverextra',
1935 _(b"checking Mercurial custom build (%s)\n"),
1941 _(b"checking Mercurial custom build (%s)\n"),
1936 b'+'.join(hgver.split(b'+')[1:]),
1942 b'+'.join(hgver.split(b'+')[1:]),
1937 )
1943 )
1938
1944
1939 # compiled modules
1945 # compiled modules
1940 hgmodules = None
1946 hgmodules = None
1941 if util.safehasattr(sys.modules[__name__], '__file__'):
1947 if util.safehasattr(sys.modules[__name__], '__file__'):
1942 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1948 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1943 elif getattr(sys, 'oxidized', False):
1949 elif getattr(sys, 'oxidized', False):
1944 hgmodules = pycompat.sysexecutable
1950 hgmodules = pycompat.sysexecutable
1945
1951
1946 fm.write(
1952 fm.write(
1947 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1953 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1948 )
1954 )
1949 fm.write(
1955 fm.write(
1950 b'hgmodules',
1956 b'hgmodules',
1951 _(b"checking installed modules (%s)...\n"),
1957 _(b"checking installed modules (%s)...\n"),
1952 hgmodules or _(b"unknown"),
1958 hgmodules or _(b"unknown"),
1953 )
1959 )
1954
1960
1955 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1961 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1956 rustext = rustandc # for now, that's the only case
1962 rustext = rustandc # for now, that's the only case
1957 cext = policy.policy in (b'c', b'allow') or rustandc
1963 cext = policy.policy in (b'c', b'allow') or rustandc
1958 nopure = cext or rustext
1964 nopure = cext or rustext
1959 if nopure:
1965 if nopure:
1960 err = None
1966 err = None
1961 try:
1967 try:
1962 if cext:
1968 if cext:
1963 from .cext import ( # pytype: disable=import-error
1969 from .cext import ( # pytype: disable=import-error
1964 base85,
1970 base85,
1965 bdiff,
1971 bdiff,
1966 mpatch,
1972 mpatch,
1967 osutil,
1973 osutil,
1968 )
1974 )
1969
1975
1970 # quiet pyflakes
1976 # quiet pyflakes
1971 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1977 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1972 if rustext:
1978 if rustext:
1973 from .rustext import ( # pytype: disable=import-error
1979 from .rustext import ( # pytype: disable=import-error
1974 ancestor,
1980 ancestor,
1975 dirstate,
1981 dirstate,
1976 )
1982 )
1977
1983
1978 dir(ancestor), dir(dirstate) # quiet pyflakes
1984 dir(ancestor), dir(dirstate) # quiet pyflakes
1979 except Exception as inst:
1985 except Exception as inst:
1980 err = stringutil.forcebytestr(inst)
1986 err = stringutil.forcebytestr(inst)
1981 problems += 1
1987 problems += 1
1982 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1988 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1983
1989
1984 compengines = util.compengines._engines.values()
1990 compengines = util.compengines._engines.values()
1985 fm.write(
1991 fm.write(
1986 b'compengines',
1992 b'compengines',
1987 _(b'checking registered compression engines (%s)\n'),
1993 _(b'checking registered compression engines (%s)\n'),
1988 fm.formatlist(
1994 fm.formatlist(
1989 sorted(e.name() for e in compengines),
1995 sorted(e.name() for e in compengines),
1990 name=b'compengine',
1996 name=b'compengine',
1991 fmt=b'%s',
1997 fmt=b'%s',
1992 sep=b', ',
1998 sep=b', ',
1993 ),
1999 ),
1994 )
2000 )
1995 fm.write(
2001 fm.write(
1996 b'compenginesavail',
2002 b'compenginesavail',
1997 _(b'checking available compression engines (%s)\n'),
2003 _(b'checking available compression engines (%s)\n'),
1998 fm.formatlist(
2004 fm.formatlist(
1999 sorted(e.name() for e in compengines if e.available()),
2005 sorted(e.name() for e in compengines if e.available()),
2000 name=b'compengine',
2006 name=b'compengine',
2001 fmt=b'%s',
2007 fmt=b'%s',
2002 sep=b', ',
2008 sep=b', ',
2003 ),
2009 ),
2004 )
2010 )
2005 wirecompengines = compression.compengines.supportedwireengines(
2011 wirecompengines = compression.compengines.supportedwireengines(
2006 compression.SERVERROLE
2012 compression.SERVERROLE
2007 )
2013 )
2008 fm.write(
2014 fm.write(
2009 b'compenginesserver',
2015 b'compenginesserver',
2010 _(
2016 _(
2011 b'checking available compression engines '
2017 b'checking available compression engines '
2012 b'for wire protocol (%s)\n'
2018 b'for wire protocol (%s)\n'
2013 ),
2019 ),
2014 fm.formatlist(
2020 fm.formatlist(
2015 [e.name() for e in wirecompengines if e.wireprotosupport()],
2021 [e.name() for e in wirecompengines if e.wireprotosupport()],
2016 name=b'compengine',
2022 name=b'compengine',
2017 fmt=b'%s',
2023 fmt=b'%s',
2018 sep=b', ',
2024 sep=b', ',
2019 ),
2025 ),
2020 )
2026 )
2021 re2 = b'missing'
2027 re2 = b'missing'
2022 if util._re2:
2028 if util._re2:
2023 re2 = b'available'
2029 re2 = b'available'
2024 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2030 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2025 fm.data(re2=bool(util._re2))
2031 fm.data(re2=bool(util._re2))
2026
2032
2027 # templates
2033 # templates
2028 p = templater.templatedir()
2034 p = templater.templatedir()
2029 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2035 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2030 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2036 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2031 if p:
2037 if p:
2032 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2038 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2033 if m:
2039 if m:
2034 # template found, check if it is working
2040 # template found, check if it is working
2035 err = None
2041 err = None
2036 try:
2042 try:
2037 templater.templater.frommapfile(m)
2043 templater.templater.frommapfile(m)
2038 except Exception as inst:
2044 except Exception as inst:
2039 err = stringutil.forcebytestr(inst)
2045 err = stringutil.forcebytestr(inst)
2040 p = None
2046 p = None
2041 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2047 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2042 else:
2048 else:
2043 p = None
2049 p = None
2044 fm.condwrite(
2050 fm.condwrite(
2045 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2051 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2046 )
2052 )
2047 fm.condwrite(
2053 fm.condwrite(
2048 not m,
2054 not m,
2049 b'defaulttemplatenotfound',
2055 b'defaulttemplatenotfound',
2050 _(b" template '%s' not found\n"),
2056 _(b" template '%s' not found\n"),
2051 b"default",
2057 b"default",
2052 )
2058 )
2053 if not p:
2059 if not p:
2054 problems += 1
2060 problems += 1
2055 fm.condwrite(
2061 fm.condwrite(
2056 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2062 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2057 )
2063 )
2058
2064
2059 # editor
2065 # editor
2060 editor = ui.geteditor()
2066 editor = ui.geteditor()
2061 editor = util.expandpath(editor)
2067 editor = util.expandpath(editor)
2062 editorbin = procutil.shellsplit(editor)[0]
2068 editorbin = procutil.shellsplit(editor)[0]
2063 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2069 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2064 cmdpath = procutil.findexe(editorbin)
2070 cmdpath = procutil.findexe(editorbin)
2065 fm.condwrite(
2071 fm.condwrite(
2066 not cmdpath and editor == b'vi',
2072 not cmdpath and editor == b'vi',
2067 b'vinotfound',
2073 b'vinotfound',
2068 _(
2074 _(
2069 b" No commit editor set and can't find %s in PATH\n"
2075 b" No commit editor set and can't find %s in PATH\n"
2070 b" (specify a commit editor in your configuration"
2076 b" (specify a commit editor in your configuration"
2071 b" file)\n"
2077 b" file)\n"
2072 ),
2078 ),
2073 not cmdpath and editor == b'vi' and editorbin,
2079 not cmdpath and editor == b'vi' and editorbin,
2074 )
2080 )
2075 fm.condwrite(
2081 fm.condwrite(
2076 not cmdpath and editor != b'vi',
2082 not cmdpath and editor != b'vi',
2077 b'editornotfound',
2083 b'editornotfound',
2078 _(
2084 _(
2079 b" Can't find editor '%s' in PATH\n"
2085 b" Can't find editor '%s' in PATH\n"
2080 b" (specify a commit editor in your configuration"
2086 b" (specify a commit editor in your configuration"
2081 b" file)\n"
2087 b" file)\n"
2082 ),
2088 ),
2083 not cmdpath and editorbin,
2089 not cmdpath and editorbin,
2084 )
2090 )
2085 if not cmdpath and editor != b'vi':
2091 if not cmdpath and editor != b'vi':
2086 problems += 1
2092 problems += 1
2087
2093
2088 # check username
2094 # check username
2089 username = None
2095 username = None
2090 err = None
2096 err = None
2091 try:
2097 try:
2092 username = ui.username()
2098 username = ui.username()
2093 except error.Abort as e:
2099 except error.Abort as e:
2094 err = e.message
2100 err = e.message
2095 problems += 1
2101 problems += 1
2096
2102
2097 fm.condwrite(
2103 fm.condwrite(
2098 username, b'username', _(b"checking username (%s)\n"), username
2104 username, b'username', _(b"checking username (%s)\n"), username
2099 )
2105 )
2100 fm.condwrite(
2106 fm.condwrite(
2101 err,
2107 err,
2102 b'usernameerror',
2108 b'usernameerror',
2103 _(
2109 _(
2104 b"checking username...\n %s\n"
2110 b"checking username...\n %s\n"
2105 b" (specify a username in your configuration file)\n"
2111 b" (specify a username in your configuration file)\n"
2106 ),
2112 ),
2107 err,
2113 err,
2108 )
2114 )
2109
2115
2110 for name, mod in extensions.extensions():
2116 for name, mod in extensions.extensions():
2111 handler = getattr(mod, 'debuginstall', None)
2117 handler = getattr(mod, 'debuginstall', None)
2112 if handler is not None:
2118 if handler is not None:
2113 problems += handler(ui, fm)
2119 problems += handler(ui, fm)
2114
2120
2115 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2121 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2116 if not problems:
2122 if not problems:
2117 fm.data(problems=problems)
2123 fm.data(problems=problems)
2118 fm.condwrite(
2124 fm.condwrite(
2119 problems,
2125 problems,
2120 b'problems',
2126 b'problems',
2121 _(b"%d problems detected, please check your install!\n"),
2127 _(b"%d problems detected, please check your install!\n"),
2122 problems,
2128 problems,
2123 )
2129 )
2124 fm.end()
2130 fm.end()
2125
2131
2126 return problems
2132 return problems
2127
2133
2128
2134
2129 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2135 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2130 def debugknown(ui, repopath, *ids, **opts):
2136 def debugknown(ui, repopath, *ids, **opts):
2131 """test whether node ids are known to a repo
2137 """test whether node ids are known to a repo
2132
2138
2133 Every ID must be a full-length hex node id string. Returns a list of 0s
2139 Every ID must be a full-length hex node id string. Returns a list of 0s
2134 and 1s indicating unknown/known.
2140 and 1s indicating unknown/known.
2135 """
2141 """
2136 opts = pycompat.byteskwargs(opts)
2142 opts = pycompat.byteskwargs(opts)
2137 repo = hg.peer(ui, opts, repopath)
2143 repo = hg.peer(ui, opts, repopath)
2138 if not repo.capable(b'known'):
2144 if not repo.capable(b'known'):
2139 raise error.Abort(b"known() not supported by target repository")
2145 raise error.Abort(b"known() not supported by target repository")
2140 flags = repo.known([bin(s) for s in ids])
2146 flags = repo.known([bin(s) for s in ids])
2141 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2147 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2142
2148
2143
2149
2144 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2150 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2145 def debuglabelcomplete(ui, repo, *args):
2151 def debuglabelcomplete(ui, repo, *args):
2146 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2152 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2147 debugnamecomplete(ui, repo, *args)
2153 debugnamecomplete(ui, repo, *args)
2148
2154
2149
2155
2150 @command(
2156 @command(
2151 b'debuglocks',
2157 b'debuglocks',
2152 [
2158 [
2153 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2159 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2154 (
2160 (
2155 b'W',
2161 b'W',
2156 b'force-free-wlock',
2162 b'force-free-wlock',
2157 None,
2163 None,
2158 _(b'free the working state lock (DANGEROUS)'),
2164 _(b'free the working state lock (DANGEROUS)'),
2159 ),
2165 ),
2160 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2166 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2161 (
2167 (
2162 b'S',
2168 b'S',
2163 b'set-wlock',
2169 b'set-wlock',
2164 None,
2170 None,
2165 _(b'set the working state lock until stopped'),
2171 _(b'set the working state lock until stopped'),
2166 ),
2172 ),
2167 ],
2173 ],
2168 _(b'[OPTION]...'),
2174 _(b'[OPTION]...'),
2169 )
2175 )
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-* unconditionally removes the on-disk lock file(s) and
    # short-circuits every other mode of operation.
    free_lock = opts.get('force_free_lock')
    free_wlock = opts.get('force_free_wlock')
    if free_lock:
        repo.svfs.tryunlink(b'lock')
    if free_wlock:
        repo.vfs.tryunlink(b'wlock')
    if free_lock or free_wlock:
        return 0

    acquired = []
    try:
        # --set-wlock / --set-lock: grab the requested locks (non-blocking)
        # and hold them until the user or a signal tells us to let go.
        if opts.get('set_wlock'):
            try:
                acquired.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                acquired.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if acquired:
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(acquired)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*acquired)

    # Default mode: report on each lock's current holder.
    now = time.time()

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            lk = method(False)
        except error.LockHeld:
            lk = None

        if lk:
            lk.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # a missing lock file simply means the lock is free
                if e.errno != errno.ENOENT:
                    raise

            ui.writenoi18n(b"%-6s free\n" % (name + b":"))
            return 0

    held = report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2273
2279
2274
2280
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Not every revlog implementation carries a fulltext cache.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            getcache().clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # No mutation requested: dump the current cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return

    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # Use cache.peek to not update the LRU order
        data = cache.peek(nodeid)
        size = len(data)
        totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2348
2354
2349
2355
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if v1records or v2records:
            if not v2records:
                ui.writenoi18n(b'no version 2 merge state\n')
            elif ms._v1v2match(v1records, v2records):
                ui.writenoi18n(b'v1 and v2 states match: using v2\n')
            else:
                ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable rendering of the merge state.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local/other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name, node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k, value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that are not themselves in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f, key=k, value=v)
    fm_extras.end()

    fm.end()
2457
2463
2458
2464
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for kind, ns in repo.names.items():
        if kind != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)

    prefixes = args or [b'']
    completions = {n for a in prefixes for n in names if n.startswith(a)}
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2481
2487
2482
2488
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            # fixed duplicated word in the help string
            # (was: "check that the data on disk data are correct.")
            _(b'check that the data on disk are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # Every mode operates on the unfiltered changelog, so compute it once
    # instead of repeating the boilerplate in each branch.
    unfi = repo.unfiltered()
    cl = unfi.changelog

    if opts['dump_new']:
        # Prefer the (rust) index's own serializer when available.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # Guard against an empty data file: the original code divided
            # unconditionally and raised ZeroDivisionError when
            # data_length == 0.
            if docket.data_length:
                unused_perc = docket.data_unused * 100.0 / docket.data_length
            else:
                unused_perc = 0.0
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2544
2550
2545
2551
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hexadecimal node id; raises InputError otherwise.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # --delete mode: remove the markers at the given indices.
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting `precursor`.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            # message fixed: was "cannot used"
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list (a subset of) the markers.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2695
2701
2696
2702
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "src -> dst" line per copy recorded against the first parent.
    copymap = ctx.p1copies()
    for dst, src in copymap.items():
        ui.write(b'%s -> %s\n' % (src, dst))
2709
2715
2710
2716
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Renamed from ``debugp1copies``: the previous definition reused the
    # p1 command's function name, silently rebinding the module-level
    # attribute even though the command registered here is
    # b'debugp2copies'. The registered command name is unchanged.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "src -> dst" line per copy recorded against the second parent.
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2723
2729
2724
2730
2725 @command(
2731 @command(
2726 b'debugpathcomplete',
2732 b'debugpathcomplete',
2727 [
2733 [
2728 (b'f', b'full', None, _(b'complete an entire path')),
2734 (b'f', b'full', None, _(b'complete an entire path')),
2729 (b'n', b'normal', None, _(b'show only normal files')),
2735 (b'n', b'normal', None, _(b'show only normal files')),
2730 (b'a', b'added', None, _(b'show only added files')),
2736 (b'a', b'added', None, _(b'show only added files')),
2731 (b'r', b'removed', None, _(b'show only removed files')),
2737 (b'r', b'removed', None, _(b'show only removed files')),
2732 ],
2738 ],
2733 _(b'FILESPEC...'),
2739 _(b'FILESPEC...'),
2734 )
2740 )
2735 def debugpathcomplete(ui, repo, *specs, **opts):
2741 def debugpathcomplete(ui, repo, *specs, **opts):
2736 """complete part or all of a tracked path
2742 """complete part or all of a tracked path
2737
2743
2738 This command supports shells that offer path name completion. It
2744 This command supports shells that offer path name completion. It
2739 currently completes only files already known to the dirstate.
2745 currently completes only files already known to the dirstate.
2740
2746
2741 Completion extends only to the next path segment unless
2747 Completion extends only to the next path segment unless
2742 --full is specified, in which case entire paths are used."""
2748 --full is specified, in which case entire paths are used."""
2743
2749
2744 def complete(path, acceptable):
2750 def complete(path, acceptable):
2745 dirstate = repo.dirstate
2751 dirstate = repo.dirstate
2746 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2752 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2747 rootdir = repo.root + pycompat.ossep
2753 rootdir = repo.root + pycompat.ossep
2748 if spec != repo.root and not spec.startswith(rootdir):
2754 if spec != repo.root and not spec.startswith(rootdir):
2749 return [], []
2755 return [], []
2750 if os.path.isdir(spec):
2756 if os.path.isdir(spec):
2751 spec += b'/'
2757 spec += b'/'
2752 spec = spec[len(rootdir) :]
2758 spec = spec[len(rootdir) :]
2753 fixpaths = pycompat.ossep != b'/'
2759 fixpaths = pycompat.ossep != b'/'
2754 if fixpaths:
2760 if fixpaths:
2755 spec = spec.replace(pycompat.ossep, b'/')
2761 spec = spec.replace(pycompat.ossep, b'/')
2756 speclen = len(spec)
2762 speclen = len(spec)
2757 fullpaths = opts['full']
2763 fullpaths = opts['full']
2758 files, dirs = set(), set()
2764 files, dirs = set(), set()
2759 adddir, addfile = dirs.add, files.add
2765 adddir, addfile = dirs.add, files.add
2760 for f, st in dirstate.items():
2766 for f, st in dirstate.items():
2761 if f.startswith(spec) and st.state in acceptable:
2767 if f.startswith(spec) and st.state in acceptable:
2762 if fixpaths:
2768 if fixpaths:
2763 f = f.replace(b'/', pycompat.ossep)
2769 f = f.replace(b'/', pycompat.ossep)
2764 if fullpaths:
2770 if fullpaths:
2765 addfile(f)
2771 addfile(f)
2766 continue
2772 continue
2767 s = f.find(pycompat.ossep, speclen)
2773 s = f.find(pycompat.ossep, speclen)
2768 if s >= 0:
2774 if s >= 0:
2769 adddir(f[:s])
2775 adddir(f[:s])
2770 else:
2776 else:
2771 addfile(f)
2777 addfile(f)
2772 return files, dirs
2778 return files, dirs
2773
2779
2774 acceptable = b''
2780 acceptable = b''
2775 if opts['normal']:
2781 if opts['normal']:
2776 acceptable += b'nm'
2782 acceptable += b'nm'
2777 if opts['added']:
2783 if opts['added']:
2778 acceptable += b'a'
2784 acceptable += b'a'
2779 if opts['removed']:
2785 if opts['removed']:
2780 acceptable += b'r'
2786 acceptable += b'r'
2781 cwd = repo.getcwd()
2787 cwd = repo.getcwd()
2782 if not specs:
2788 if not specs:
2783 specs = [b'.']
2789 specs = [b'.']
2784
2790
2785 files, dirs = set(), set()
2791 files, dirs = set(), set()
2786 for spec in specs:
2792 for spec in specs:
2787 f, d = complete(spec, acceptable or b'nmar')
2793 f, d = complete(spec, acceptable or b'nmar')
2788 files.update(f)
2794 files.update(f)
2789 dirs.update(d)
2795 dirs.update(d)
2790 files.update(dirs)
2796 files.update(dirs)
2791 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2797 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2792 ui.write(b'\n')
2798 ui.write(b'\n')
2793
2799
2794
2800
2795 @command(
2801 @command(
2796 b'debugpathcopies',
2802 b'debugpathcopies',
2797 cmdutil.walkopts,
2803 cmdutil.walkopts,
2798 b'hg debugpathcopies REV1 REV2 [FILE]',
2804 b'hg debugpathcopies REV1 REV2 [FILE]',
2799 inferrepo=True,
2805 inferrepo=True,
2800 )
2806 )
2801 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2807 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2802 """show copies between two revisions"""
2808 """show copies between two revisions"""
2803 ctx1 = scmutil.revsingle(repo, rev1)
2809 ctx1 = scmutil.revsingle(repo, rev1)
2804 ctx2 = scmutil.revsingle(repo, rev2)
2810 ctx2 = scmutil.revsingle(repo, rev2)
2805 m = scmutil.match(ctx1, pats, opts)
2811 m = scmutil.match(ctx1, pats, opts)
2806 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2812 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2807 ui.write(b'%s -> %s\n' % (src, dst))
2813 ui.write(b'%s -> %s\n' % (src, dst))
2808
2814
2809
2815
2810 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2816 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2811 def debugpeer(ui, path):
2817 def debugpeer(ui, path):
2812 """establish a connection to a peer repository"""
2818 """establish a connection to a peer repository"""
2813 # Always enable peer request logging. Requires --debug to display
2819 # Always enable peer request logging. Requires --debug to display
2814 # though.
2820 # though.
2815 overrides = {
2821 overrides = {
2816 (b'devel', b'debug.peer-request'): True,
2822 (b'devel', b'debug.peer-request'): True,
2817 }
2823 }
2818
2824
2819 with ui.configoverride(overrides):
2825 with ui.configoverride(overrides):
2820 peer = hg.peer(ui, {}, path)
2826 peer = hg.peer(ui, {}, path)
2821
2827
2822 try:
2828 try:
2823 local = peer.local() is not None
2829 local = peer.local() is not None
2824 canpush = peer.canpush()
2830 canpush = peer.canpush()
2825
2831
2826 ui.write(_(b'url: %s\n') % peer.url())
2832 ui.write(_(b'url: %s\n') % peer.url())
2827 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2833 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2828 ui.write(
2834 ui.write(
2829 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2835 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2830 )
2836 )
2831 finally:
2837 finally:
2832 peer.close()
2838 peer.close()
2833
2839
2834
2840
2835 @command(
2841 @command(
2836 b'debugpickmergetool',
2842 b'debugpickmergetool',
2837 [
2843 [
2838 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2844 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2839 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2845 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2840 ]
2846 ]
2841 + cmdutil.walkopts
2847 + cmdutil.walkopts
2842 + cmdutil.mergetoolopts,
2848 + cmdutil.mergetoolopts,
2843 _(b'[PATTERN]...'),
2849 _(b'[PATTERN]...'),
2844 inferrepo=True,
2850 inferrepo=True,
2845 )
2851 )
2846 def debugpickmergetool(ui, repo, *pats, **opts):
2852 def debugpickmergetool(ui, repo, *pats, **opts):
2847 """examine which merge tool is chosen for specified file
2853 """examine which merge tool is chosen for specified file
2848
2854
2849 As described in :hg:`help merge-tools`, Mercurial examines
2855 As described in :hg:`help merge-tools`, Mercurial examines
2850 configurations below in this order to decide which merge tool is
2856 configurations below in this order to decide which merge tool is
2851 chosen for specified file.
2857 chosen for specified file.
2852
2858
2853 1. ``--tool`` option
2859 1. ``--tool`` option
2854 2. ``HGMERGE`` environment variable
2860 2. ``HGMERGE`` environment variable
2855 3. configurations in ``merge-patterns`` section
2861 3. configurations in ``merge-patterns`` section
2856 4. configuration of ``ui.merge``
2862 4. configuration of ``ui.merge``
2857 5. configurations in ``merge-tools`` section
2863 5. configurations in ``merge-tools`` section
2858 6. ``hgmerge`` tool (for historical reason only)
2864 6. ``hgmerge`` tool (for historical reason only)
2859 7. default tool for fallback (``:merge`` or ``:prompt``)
2865 7. default tool for fallback (``:merge`` or ``:prompt``)
2860
2866
2861 This command writes out examination result in the style below::
2867 This command writes out examination result in the style below::
2862
2868
2863 FILE = MERGETOOL
2869 FILE = MERGETOOL
2864
2870
2865 By default, all files known in the first parent context of the
2871 By default, all files known in the first parent context of the
2866 working directory are examined. Use file patterns and/or -I/-X
2872 working directory are examined. Use file patterns and/or -I/-X
2867 options to limit target files. -r/--rev is also useful to examine
2873 options to limit target files. -r/--rev is also useful to examine
2868 files in another context without actual updating to it.
2874 files in another context without actual updating to it.
2869
2875
2870 With --debug, this command shows warning messages while matching
2876 With --debug, this command shows warning messages while matching
2871 against ``merge-patterns`` and so on, too. It is recommended to
2877 against ``merge-patterns`` and so on, too. It is recommended to
2872 use this option with explicit file patterns and/or -I/-X options,
2878 use this option with explicit file patterns and/or -I/-X options,
2873 because this option increases amount of output per file according
2879 because this option increases amount of output per file according
2874 to configurations in hgrc.
2880 to configurations in hgrc.
2875
2881
2876 With -v/--verbose, this command shows configurations below at
2882 With -v/--verbose, this command shows configurations below at
2877 first (only if specified).
2883 first (only if specified).
2878
2884
2879 - ``--tool`` option
2885 - ``--tool`` option
2880 - ``HGMERGE`` environment variable
2886 - ``HGMERGE`` environment variable
2881 - configuration of ``ui.merge``
2887 - configuration of ``ui.merge``
2882
2888
2883 If merge tool is chosen before matching against
2889 If merge tool is chosen before matching against
2884 ``merge-patterns``, this command can't show any helpful
2890 ``merge-patterns``, this command can't show any helpful
2885 information, even with --debug. In such case, information above is
2891 information, even with --debug. In such case, information above is
2886 useful to know why a merge tool is chosen.
2892 useful to know why a merge tool is chosen.
2887 """
2893 """
2888 opts = pycompat.byteskwargs(opts)
2894 opts = pycompat.byteskwargs(opts)
2889 overrides = {}
2895 overrides = {}
2890 if opts[b'tool']:
2896 if opts[b'tool']:
2891 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2897 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2892 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2898 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2893
2899
2894 with ui.configoverride(overrides, b'debugmergepatterns'):
2900 with ui.configoverride(overrides, b'debugmergepatterns'):
2895 hgmerge = encoding.environ.get(b"HGMERGE")
2901 hgmerge = encoding.environ.get(b"HGMERGE")
2896 if hgmerge is not None:
2902 if hgmerge is not None:
2897 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2903 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2898 uimerge = ui.config(b"ui", b"merge")
2904 uimerge = ui.config(b"ui", b"merge")
2899 if uimerge:
2905 if uimerge:
2900 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2906 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2901
2907
2902 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2908 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2903 m = scmutil.match(ctx, pats, opts)
2909 m = scmutil.match(ctx, pats, opts)
2904 changedelete = opts[b'changedelete']
2910 changedelete = opts[b'changedelete']
2905 for path in ctx.walk(m):
2911 for path in ctx.walk(m):
2906 fctx = ctx[path]
2912 fctx = ctx[path]
2907 with ui.silent(
2913 with ui.silent(
2908 error=True
2914 error=True
2909 ) if not ui.debugflag else util.nullcontextmanager():
2915 ) if not ui.debugflag else util.nullcontextmanager():
2910 tool, toolpath = filemerge._picktool(
2916 tool, toolpath = filemerge._picktool(
2911 repo,
2917 repo,
2912 ui,
2918 ui,
2913 path,
2919 path,
2914 fctx.isbinary(),
2920 fctx.isbinary(),
2915 b'l' in fctx.flags(),
2921 b'l' in fctx.flags(),
2916 changedelete,
2922 changedelete,
2917 )
2923 )
2918 ui.write(b'%s = %s\n' % (path, tool))
2924 ui.write(b'%s = %s\n' % (path, tool))
2919
2925
2920
2926
2921 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2927 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2922 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2928 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2923 """access the pushkey key/value protocol
2929 """access the pushkey key/value protocol
2924
2930
2925 With two args, list the keys in the given namespace.
2931 With two args, list the keys in the given namespace.
2926
2932
2927 With five args, set a key to new if it currently is set to old.
2933 With five args, set a key to new if it currently is set to old.
2928 Reports success or failure.
2934 Reports success or failure.
2929 """
2935 """
2930
2936
2931 target = hg.peer(ui, {}, repopath)
2937 target = hg.peer(ui, {}, repopath)
2932 try:
2938 try:
2933 if keyinfo:
2939 if keyinfo:
2934 key, old, new = keyinfo
2940 key, old, new = keyinfo
2935 with target.commandexecutor() as e:
2941 with target.commandexecutor() as e:
2936 r = e.callcommand(
2942 r = e.callcommand(
2937 b'pushkey',
2943 b'pushkey',
2938 {
2944 {
2939 b'namespace': namespace,
2945 b'namespace': namespace,
2940 b'key': key,
2946 b'key': key,
2941 b'old': old,
2947 b'old': old,
2942 b'new': new,
2948 b'new': new,
2943 },
2949 },
2944 ).result()
2950 ).result()
2945
2951
2946 ui.status(pycompat.bytestr(r) + b'\n')
2952 ui.status(pycompat.bytestr(r) + b'\n')
2947 return not r
2953 return not r
2948 else:
2954 else:
2949 for k, v in sorted(target.listkeys(namespace).items()):
2955 for k, v in sorted(target.listkeys(namespace).items()):
2950 ui.write(
2956 ui.write(
2951 b"%s\t%s\n"
2957 b"%s\t%s\n"
2952 % (stringutil.escapestr(k), stringutil.escapestr(v))
2958 % (stringutil.escapestr(k), stringutil.escapestr(v))
2953 )
2959 )
2954 finally:
2960 finally:
2955 target.close()
2961 target.close()
2956
2962
2957
2963
2958 @command(b'debugpvec', [], _(b'A B'))
2964 @command(b'debugpvec', [], _(b'A B'))
2959 def debugpvec(ui, repo, a, b=None):
2965 def debugpvec(ui, repo, a, b=None):
2960 ca = scmutil.revsingle(repo, a)
2966 ca = scmutil.revsingle(repo, a)
2961 cb = scmutil.revsingle(repo, b)
2967 cb = scmutil.revsingle(repo, b)
2962 pa = pvec.ctxpvec(ca)
2968 pa = pvec.ctxpvec(ca)
2963 pb = pvec.ctxpvec(cb)
2969 pb = pvec.ctxpvec(cb)
2964 if pa == pb:
2970 if pa == pb:
2965 rel = b"="
2971 rel = b"="
2966 elif pa > pb:
2972 elif pa > pb:
2967 rel = b">"
2973 rel = b">"
2968 elif pa < pb:
2974 elif pa < pb:
2969 rel = b"<"
2975 rel = b"<"
2970 elif pa | pb:
2976 elif pa | pb:
2971 rel = b"|"
2977 rel = b"|"
2972 ui.write(_(b"a: %s\n") % pa)
2978 ui.write(_(b"a: %s\n") % pa)
2973 ui.write(_(b"b: %s\n") % pb)
2979 ui.write(_(b"b: %s\n") % pb)
2974 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2980 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2975 ui.write(
2981 ui.write(
2976 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2982 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2977 % (
2983 % (
2978 abs(pa._depth - pb._depth),
2984 abs(pa._depth - pb._depth),
2979 pvec._hamming(pa._vec, pb._vec),
2985 pvec._hamming(pa._vec, pb._vec),
2980 pa.distance(pb),
2986 pa.distance(pb),
2981 rel,
2987 rel,
2982 )
2988 )
2983 )
2989 )
2984
2990
2985
2991
2986 @command(
2992 @command(
2987 b'debugrebuilddirstate|debugrebuildstate',
2993 b'debugrebuilddirstate|debugrebuildstate',
2988 [
2994 [
2989 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2995 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2990 (
2996 (
2991 b'',
2997 b'',
2992 b'minimal',
2998 b'minimal',
2993 None,
2999 None,
2994 _(
3000 _(
2995 b'only rebuild files that are inconsistent with '
3001 b'only rebuild files that are inconsistent with '
2996 b'the working copy parent'
3002 b'the working copy parent'
2997 ),
3003 ),
2998 ),
3004 ),
2999 ],
3005 ],
3000 _(b'[-r REV]'),
3006 _(b'[-r REV]'),
3001 )
3007 )
3002 def debugrebuilddirstate(ui, repo, rev, **opts):
3008 def debugrebuilddirstate(ui, repo, rev, **opts):
3003 """rebuild the dirstate as it would look like for the given revision
3009 """rebuild the dirstate as it would look like for the given revision
3004
3010
3005 If no revision is specified the first current parent will be used.
3011 If no revision is specified the first current parent will be used.
3006
3012
3007 The dirstate will be set to the files of the given revision.
3013 The dirstate will be set to the files of the given revision.
3008 The actual working directory content or existing dirstate
3014 The actual working directory content or existing dirstate
3009 information such as adds or removes is not considered.
3015 information such as adds or removes is not considered.
3010
3016
3011 ``minimal`` will only rebuild the dirstate status for files that claim to be
3017 ``minimal`` will only rebuild the dirstate status for files that claim to be
3012 tracked but are not in the parent manifest, or that exist in the parent
3018 tracked but are not in the parent manifest, or that exist in the parent
3013 manifest but are not in the dirstate. It will not change adds, removes, or
3019 manifest but are not in the dirstate. It will not change adds, removes, or
3014 modified files that are in the working copy parent.
3020 modified files that are in the working copy parent.
3015
3021
3016 One use of this command is to make the next :hg:`status` invocation
3022 One use of this command is to make the next :hg:`status` invocation
3017 check the actual file content.
3023 check the actual file content.
3018 """
3024 """
3019 ctx = scmutil.revsingle(repo, rev)
3025 ctx = scmutil.revsingle(repo, rev)
3020 with repo.wlock():
3026 with repo.wlock():
3021 dirstate = repo.dirstate
3027 dirstate = repo.dirstate
3022 changedfiles = None
3028 changedfiles = None
3023 # See command doc for what minimal does.
3029 # See command doc for what minimal does.
3024 if opts.get('minimal'):
3030 if opts.get('minimal'):
3025 manifestfiles = set(ctx.manifest().keys())
3031 manifestfiles = set(ctx.manifest().keys())
3026 dirstatefiles = set(dirstate)
3032 dirstatefiles = set(dirstate)
3027 manifestonly = manifestfiles - dirstatefiles
3033 manifestonly = manifestfiles - dirstatefiles
3028 dsonly = dirstatefiles - manifestfiles
3034 dsonly = dirstatefiles - manifestfiles
3029 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3035 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3030 changedfiles = manifestonly | dsnotadded
3036 changedfiles = manifestonly | dsnotadded
3031
3037
3032 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3038 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3033
3039
3034
3040
3035 @command(
3041 @command(
3036 b'debugrebuildfncache',
3042 b'debugrebuildfncache',
3037 [
3043 [
3038 (
3044 (
3039 b'',
3045 b'',
3040 b'only-data',
3046 b'only-data',
3041 False,
3047 False,
3042 _(b'only look for wrong .d files (much faster)'),
3048 _(b'only look for wrong .d files (much faster)'),
3043 )
3049 )
3044 ],
3050 ],
3045 b'',
3051 b'',
3046 )
3052 )
3047 def debugrebuildfncache(ui, repo, **opts):
3053 def debugrebuildfncache(ui, repo, **opts):
3048 """rebuild the fncache file"""
3054 """rebuild the fncache file"""
3049 opts = pycompat.byteskwargs(opts)
3055 opts = pycompat.byteskwargs(opts)
3050 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3056 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3051
3057
3052
3058
3053 @command(
3059 @command(
3054 b'debugrename',
3060 b'debugrename',
3055 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3061 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3056 _(b'[-r REV] [FILE]...'),
3062 _(b'[-r REV] [FILE]...'),
3057 )
3063 )
3058 def debugrename(ui, repo, *pats, **opts):
3064 def debugrename(ui, repo, *pats, **opts):
3059 """dump rename information"""
3065 """dump rename information"""
3060
3066
3061 opts = pycompat.byteskwargs(opts)
3067 opts = pycompat.byteskwargs(opts)
3062 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3068 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3063 m = scmutil.match(ctx, pats, opts)
3069 m = scmutil.match(ctx, pats, opts)
3064 for abs in ctx.walk(m):
3070 for abs in ctx.walk(m):
3065 fctx = ctx[abs]
3071 fctx = ctx[abs]
3066 o = fctx.filelog().renamed(fctx.filenode())
3072 o = fctx.filelog().renamed(fctx.filenode())
3067 rel = repo.pathto(abs)
3073 rel = repo.pathto(abs)
3068 if o:
3074 if o:
3069 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3075 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3070 else:
3076 else:
3071 ui.write(_(b"%s not renamed\n") % rel)
3077 ui.write(_(b"%s not renamed\n") % rel)
3072
3078
3073
3079
3074 @command(b'debugrequires|debugrequirements', [], b'')
3080 @command(b'debugrequires|debugrequirements', [], b'')
3075 def debugrequirements(ui, repo):
3081 def debugrequirements(ui, repo):
3076 """print the current repo requirements"""
3082 """print the current repo requirements"""
3077 for r in sorted(repo.requirements):
3083 for r in sorted(repo.requirements):
3078 ui.write(b"%s\n" % r)
3084 ui.write(b"%s\n" % r)
3079
3085
3080
3086
3081 @command(
3087 @command(
3082 b'debugrevlog',
3088 b'debugrevlog',
3083 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3089 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3084 _(b'-c|-m|FILE'),
3090 _(b'-c|-m|FILE'),
3085 optionalrepo=True,
3091 optionalrepo=True,
3086 )
3092 )
3087 def debugrevlog(ui, repo, file_=None, **opts):
3093 def debugrevlog(ui, repo, file_=None, **opts):
3088 """show data and statistics about a revlog"""
3094 """show data and statistics about a revlog"""
3089 opts = pycompat.byteskwargs(opts)
3095 opts = pycompat.byteskwargs(opts)
3090 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3096 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3091
3097
3092 if opts.get(b"dump"):
3098 if opts.get(b"dump"):
3093 numrevs = len(r)
3099 numrevs = len(r)
3094 ui.write(
3100 ui.write(
3095 (
3101 (
3096 b"# rev p1rev p2rev start end deltastart base p1 p2"
3102 b"# rev p1rev p2rev start end deltastart base p1 p2"
3097 b" rawsize totalsize compression heads chainlen\n"
3103 b" rawsize totalsize compression heads chainlen\n"
3098 )
3104 )
3099 )
3105 )
3100 ts = 0
3106 ts = 0
3101 heads = set()
3107 heads = set()
3102
3108
3103 for rev in pycompat.xrange(numrevs):
3109 for rev in pycompat.xrange(numrevs):
3104 dbase = r.deltaparent(rev)
3110 dbase = r.deltaparent(rev)
3105 if dbase == -1:
3111 if dbase == -1:
3106 dbase = rev
3112 dbase = rev
3107 cbase = r.chainbase(rev)
3113 cbase = r.chainbase(rev)
3108 clen = r.chainlen(rev)
3114 clen = r.chainlen(rev)
3109 p1, p2 = r.parentrevs(rev)
3115 p1, p2 = r.parentrevs(rev)
3110 rs = r.rawsize(rev)
3116 rs = r.rawsize(rev)
3111 ts = ts + rs
3117 ts = ts + rs
3112 heads -= set(r.parentrevs(rev))
3118 heads -= set(r.parentrevs(rev))
3113 heads.add(rev)
3119 heads.add(rev)
3114 try:
3120 try:
3115 compression = ts / r.end(rev)
3121 compression = ts / r.end(rev)
3116 except ZeroDivisionError:
3122 except ZeroDivisionError:
3117 compression = 0
3123 compression = 0
3118 ui.write(
3124 ui.write(
3119 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3125 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3120 b"%11d %5d %8d\n"
3126 b"%11d %5d %8d\n"
3121 % (
3127 % (
3122 rev,
3128 rev,
3123 p1,
3129 p1,
3124 p2,
3130 p2,
3125 r.start(rev),
3131 r.start(rev),
3126 r.end(rev),
3132 r.end(rev),
3127 r.start(dbase),
3133 r.start(dbase),
3128 r.start(cbase),
3134 r.start(cbase),
3129 r.start(p1),
3135 r.start(p1),
3130 r.start(p2),
3136 r.start(p2),
3131 rs,
3137 rs,
3132 ts,
3138 ts,
3133 compression,
3139 compression,
3134 len(heads),
3140 len(heads),
3135 clen,
3141 clen,
3136 )
3142 )
3137 )
3143 )
3138 return 0
3144 return 0
3139
3145
3140 format = r._format_version
3146 format = r._format_version
3141 v = r._format_flags
3147 v = r._format_flags
3142 flags = []
3148 flags = []
3143 gdelta = False
3149 gdelta = False
3144 if v & revlog.FLAG_INLINE_DATA:
3150 if v & revlog.FLAG_INLINE_DATA:
3145 flags.append(b'inline')
3151 flags.append(b'inline')
3146 if v & revlog.FLAG_GENERALDELTA:
3152 if v & revlog.FLAG_GENERALDELTA:
3147 gdelta = True
3153 gdelta = True
3148 flags.append(b'generaldelta')
3154 flags.append(b'generaldelta')
3149 if not flags:
3155 if not flags:
3150 flags = [b'(none)']
3156 flags = [b'(none)']
3151
3157
3152 ### tracks merge vs single parent
3158 ### tracks merge vs single parent
3153 nummerges = 0
3159 nummerges = 0
3154
3160
3155 ### tracks ways the "delta" are build
3161 ### tracks ways the "delta" are build
3156 # nodelta
3162 # nodelta
3157 numempty = 0
3163 numempty = 0
3158 numemptytext = 0
3164 numemptytext = 0
3159 numemptydelta = 0
3165 numemptydelta = 0
3160 # full file content
3166 # full file content
3161 numfull = 0
3167 numfull = 0
3162 # intermediate snapshot against a prior snapshot
3168 # intermediate snapshot against a prior snapshot
3163 numsemi = 0
3169 numsemi = 0
3164 # snapshot count per depth
3170 # snapshot count per depth
3165 numsnapdepth = collections.defaultdict(lambda: 0)
3171 numsnapdepth = collections.defaultdict(lambda: 0)
3166 # delta against previous revision
3172 # delta against previous revision
3167 numprev = 0
3173 numprev = 0
3168 # delta against first or second parent (not prev)
3174 # delta against first or second parent (not prev)
3169 nump1 = 0
3175 nump1 = 0
3170 nump2 = 0
3176 nump2 = 0
3171 # delta against neither prev nor parents
3177 # delta against neither prev nor parents
3172 numother = 0
3178 numother = 0
3173 # delta against prev that are also first or second parent
3179 # delta against prev that are also first or second parent
3174 # (details of `numprev`)
3180 # (details of `numprev`)
3175 nump1prev = 0
3181 nump1prev = 0
3176 nump2prev = 0
3182 nump2prev = 0
3177
3183
3178 # data about delta chain of each revs
3184 # data about delta chain of each revs
3179 chainlengths = []
3185 chainlengths = []
3180 chainbases = []
3186 chainbases = []
3181 chainspans = []
3187 chainspans = []
3182
3188
3183 # data about each revision
3189 # data about each revision
3184 datasize = [None, 0, 0]
3190 datasize = [None, 0, 0]
3185 fullsize = [None, 0, 0]
3191 fullsize = [None, 0, 0]
3186 semisize = [None, 0, 0]
3192 semisize = [None, 0, 0]
3187 # snapshot count per depth
3193 # snapshot count per depth
3188 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3194 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3189 deltasize = [None, 0, 0]
3195 deltasize = [None, 0, 0]
3190 chunktypecounts = {}
3196 chunktypecounts = {}
3191 chunktypesizes = {}
3197 chunktypesizes = {}
3192
3198
3193 def addsize(size, l):
3199 def addsize(size, l):
3194 if l[0] is None or size < l[0]:
3200 if l[0] is None or size < l[0]:
3195 l[0] = size
3201 l[0] = size
3196 if size > l[1]:
3202 if size > l[1]:
3197 l[1] = size
3203 l[1] = size
3198 l[2] += size
3204 l[2] += size
3199
3205
3200 numrevs = len(r)
3206 numrevs = len(r)
3201 for rev in pycompat.xrange(numrevs):
3207 for rev in pycompat.xrange(numrevs):
3202 p1, p2 = r.parentrevs(rev)
3208 p1, p2 = r.parentrevs(rev)
3203 delta = r.deltaparent(rev)
3209 delta = r.deltaparent(rev)
3204 if format > 0:
3210 if format > 0:
3205 addsize(r.rawsize(rev), datasize)
3211 addsize(r.rawsize(rev), datasize)
3206 if p2 != nullrev:
3212 if p2 != nullrev:
3207 nummerges += 1
3213 nummerges += 1
3208 size = r.length(rev)
3214 size = r.length(rev)
3209 if delta == nullrev:
3215 if delta == nullrev:
3210 chainlengths.append(0)
3216 chainlengths.append(0)
3211 chainbases.append(r.start(rev))
3217 chainbases.append(r.start(rev))
3212 chainspans.append(size)
3218 chainspans.append(size)
3213 if size == 0:
3219 if size == 0:
3214 numempty += 1
3220 numempty += 1
3215 numemptytext += 1
3221 numemptytext += 1
3216 else:
3222 else:
3217 numfull += 1
3223 numfull += 1
3218 numsnapdepth[0] += 1
3224 numsnapdepth[0] += 1
3219 addsize(size, fullsize)
3225 addsize(size, fullsize)
3220 addsize(size, snapsizedepth[0])
3226 addsize(size, snapsizedepth[0])
3221 else:
3227 else:
3222 chainlengths.append(chainlengths[delta] + 1)
3228 chainlengths.append(chainlengths[delta] + 1)
3223 baseaddr = chainbases[delta]
3229 baseaddr = chainbases[delta]
3224 revaddr = r.start(rev)
3230 revaddr = r.start(rev)
3225 chainbases.append(baseaddr)
3231 chainbases.append(baseaddr)
3226 chainspans.append((revaddr - baseaddr) + size)
3232 chainspans.append((revaddr - baseaddr) + size)
3227 if size == 0:
3233 if size == 0:
3228 numempty += 1
3234 numempty += 1
3229 numemptydelta += 1
3235 numemptydelta += 1
3230 elif r.issnapshot(rev):
3236 elif r.issnapshot(rev):
3231 addsize(size, semisize)
3237 addsize(size, semisize)
3232 numsemi += 1
3238 numsemi += 1
3233 depth = r.snapshotdepth(rev)
3239 depth = r.snapshotdepth(rev)
3234 numsnapdepth[depth] += 1
3240 numsnapdepth[depth] += 1
3235 addsize(size, snapsizedepth[depth])
3241 addsize(size, snapsizedepth[depth])
3236 else:
3242 else:
3237 addsize(size, deltasize)
3243 addsize(size, deltasize)
3238 if delta == rev - 1:
3244 if delta == rev - 1:
3239 numprev += 1
3245 numprev += 1
3240 if delta == p1:
3246 if delta == p1:
3241 nump1prev += 1
3247 nump1prev += 1
3242 elif delta == p2:
3248 elif delta == p2:
3243 nump2prev += 1
3249 nump2prev += 1
3244 elif delta == p1:
3250 elif delta == p1:
3245 nump1 += 1
3251 nump1 += 1
3246 elif delta == p2:
3252 elif delta == p2:
3247 nump2 += 1
3253 nump2 += 1
3248 elif delta != nullrev:
3254 elif delta != nullrev:
3249 numother += 1
3255 numother += 1
3250
3256
3251 # Obtain data on the raw chunks in the revlog.
3257 # Obtain data on the raw chunks in the revlog.
3252 if util.safehasattr(r, b'_getsegmentforrevs'):
3258 if util.safehasattr(r, b'_getsegmentforrevs'):
3253 segment = r._getsegmentforrevs(rev, rev)[1]
3259 segment = r._getsegmentforrevs(rev, rev)[1]
3254 else:
3260 else:
3255 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3261 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3256 if segment:
3262 if segment:
3257 chunktype = bytes(segment[0:1])
3263 chunktype = bytes(segment[0:1])
3258 else:
3264 else:
3259 chunktype = b'empty'
3265 chunktype = b'empty'
3260
3266
3261 if chunktype not in chunktypecounts:
3267 if chunktype not in chunktypecounts:
3262 chunktypecounts[chunktype] = 0
3268 chunktypecounts[chunktype] = 0
3263 chunktypesizes[chunktype] = 0
3269 chunktypesizes[chunktype] = 0
3264
3270
3265 chunktypecounts[chunktype] += 1
3271 chunktypecounts[chunktype] += 1
3266 chunktypesizes[chunktype] += size
3272 chunktypesizes[chunktype] += size
3267
3273
3268 # Adjust size min value for empty cases
3274 # Adjust size min value for empty cases
3269 for size in (datasize, fullsize, semisize, deltasize):
3275 for size in (datasize, fullsize, semisize, deltasize):
3270 if size[0] is None:
3276 if size[0] is None:
3271 size[0] = 0
3277 size[0] = 0
3272
3278
3273 numdeltas = numrevs - numfull - numempty - numsemi
3279 numdeltas = numrevs - numfull - numempty - numsemi
3274 numoprev = numprev - nump1prev - nump2prev
3280 numoprev = numprev - nump1prev - nump2prev
3275 totalrawsize = datasize[2]
3281 totalrawsize = datasize[2]
3276 datasize[2] /= numrevs
3282 datasize[2] /= numrevs
3277 fulltotal = fullsize[2]
3283 fulltotal = fullsize[2]
3278 if numfull == 0:
3284 if numfull == 0:
3279 fullsize[2] = 0
3285 fullsize[2] = 0
3280 else:
3286 else:
3281 fullsize[2] /= numfull
3287 fullsize[2] /= numfull
3282 semitotal = semisize[2]
3288 semitotal = semisize[2]
3283 snaptotal = {}
3289 snaptotal = {}
3284 if numsemi > 0:
3290 if numsemi > 0:
3285 semisize[2] /= numsemi
3291 semisize[2] /= numsemi
3286 for depth in snapsizedepth:
3292 for depth in snapsizedepth:
3287 snaptotal[depth] = snapsizedepth[depth][2]
3293 snaptotal[depth] = snapsizedepth[depth][2]
3288 snapsizedepth[depth][2] /= numsnapdepth[depth]
3294 snapsizedepth[depth][2] /= numsnapdepth[depth]
3289
3295
3290 deltatotal = deltasize[2]
3296 deltatotal = deltasize[2]
3291 if numdeltas > 0:
3297 if numdeltas > 0:
3292 deltasize[2] /= numdeltas
3298 deltasize[2] /= numdeltas
3293 totalsize = fulltotal + semitotal + deltatotal
3299 totalsize = fulltotal + semitotal + deltatotal
3294 avgchainlen = sum(chainlengths) / numrevs
3300 avgchainlen = sum(chainlengths) / numrevs
3295 maxchainlen = max(chainlengths)
3301 maxchainlen = max(chainlengths)
3296 maxchainspan = max(chainspans)
3302 maxchainspan = max(chainspans)
3297 compratio = 1
3303 compratio = 1
3298 if totalsize:
3304 if totalsize:
3299 compratio = totalrawsize / totalsize
3305 compratio = totalrawsize / totalsize
3300
3306
3301 basedfmtstr = b'%%%dd\n'
3307 basedfmtstr = b'%%%dd\n'
3302 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3308 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3303
3309
3304 def dfmtstr(max):
3310 def dfmtstr(max):
3305 return basedfmtstr % len(str(max))
3311 return basedfmtstr % len(str(max))
3306
3312
3307 def pcfmtstr(max, padding=0):
3313 def pcfmtstr(max, padding=0):
3308 return basepcfmtstr % (len(str(max)), b' ' * padding)
3314 return basepcfmtstr % (len(str(max)), b' ' * padding)
3309
3315
3310 def pcfmt(value, total):
3316 def pcfmt(value, total):
3311 if total:
3317 if total:
3312 return (value, 100 * float(value) / total)
3318 return (value, 100 * float(value) / total)
3313 else:
3319 else:
3314 return value, 100.0
3320 return value, 100.0
3315
3321
3316 ui.writenoi18n(b'format : %d\n' % format)
3322 ui.writenoi18n(b'format : %d\n' % format)
3317 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3323 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3318
3324
3319 ui.write(b'\n')
3325 ui.write(b'\n')
3320 fmt = pcfmtstr(totalsize)
3326 fmt = pcfmtstr(totalsize)
3321 fmt2 = dfmtstr(totalsize)
3327 fmt2 = dfmtstr(totalsize)
3322 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3328 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3323 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3329 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3324 ui.writenoi18n(
3330 ui.writenoi18n(
3325 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3331 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3326 )
3332 )
3327 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3333 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3328 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3334 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3329 ui.writenoi18n(
3335 ui.writenoi18n(
3330 b' text : '
3336 b' text : '
3331 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3337 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3332 )
3338 )
3333 ui.writenoi18n(
3339 ui.writenoi18n(
3334 b' delta : '
3340 b' delta : '
3335 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3341 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3336 )
3342 )
3337 ui.writenoi18n(
3343 ui.writenoi18n(
3338 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3344 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3339 )
3345 )
3340 for depth in sorted(numsnapdepth):
3346 for depth in sorted(numsnapdepth):
3341 ui.write(
3347 ui.write(
3342 (b' lvl-%-3d : ' % depth)
3348 (b' lvl-%-3d : ' % depth)
3343 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3349 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3344 )
3350 )
3345 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3351 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3346 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3352 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3347 ui.writenoi18n(
3353 ui.writenoi18n(
3348 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3354 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3349 )
3355 )
3350 for depth in sorted(numsnapdepth):
3356 for depth in sorted(numsnapdepth):
3351 ui.write(
3357 ui.write(
3352 (b' lvl-%-3d : ' % depth)
3358 (b' lvl-%-3d : ' % depth)
3353 + fmt % pcfmt(snaptotal[depth], totalsize)
3359 + fmt % pcfmt(snaptotal[depth], totalsize)
3354 )
3360 )
3355 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3361 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3356
3362
3357 def fmtchunktype(chunktype):
3363 def fmtchunktype(chunktype):
3358 if chunktype == b'empty':
3364 if chunktype == b'empty':
3359 return b' %s : ' % chunktype
3365 return b' %s : ' % chunktype
3360 elif chunktype in pycompat.bytestr(string.ascii_letters):
3366 elif chunktype in pycompat.bytestr(string.ascii_letters):
3361 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3367 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3362 else:
3368 else:
3363 return b' 0x%s : ' % hex(chunktype)
3369 return b' 0x%s : ' % hex(chunktype)
3364
3370
3365 ui.write(b'\n')
3371 ui.write(b'\n')
3366 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3372 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3367 for chunktype in sorted(chunktypecounts):
3373 for chunktype in sorted(chunktypecounts):
3368 ui.write(fmtchunktype(chunktype))
3374 ui.write(fmtchunktype(chunktype))
3369 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3375 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3370 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3376 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3371 for chunktype in sorted(chunktypecounts):
3377 for chunktype in sorted(chunktypecounts):
3372 ui.write(fmtchunktype(chunktype))
3378 ui.write(fmtchunktype(chunktype))
3373 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3379 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3374
3380
3375 ui.write(b'\n')
3381 ui.write(b'\n')
3376 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3382 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3377 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3383 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3378 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3384 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3379 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3385 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3380 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3386 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3381
3387
3382 if format > 0:
3388 if format > 0:
3383 ui.write(b'\n')
3389 ui.write(b'\n')
3384 ui.writenoi18n(
3390 ui.writenoi18n(
3385 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3391 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3386 % tuple(datasize)
3392 % tuple(datasize)
3387 )
3393 )
3388 ui.writenoi18n(
3394 ui.writenoi18n(
3389 b'full revision size (min/max/avg) : %d / %d / %d\n'
3395 b'full revision size (min/max/avg) : %d / %d / %d\n'
3390 % tuple(fullsize)
3396 % tuple(fullsize)
3391 )
3397 )
3392 ui.writenoi18n(
3398 ui.writenoi18n(
3393 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3399 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3394 % tuple(semisize)
3400 % tuple(semisize)
3395 )
3401 )
3396 for depth in sorted(snapsizedepth):
3402 for depth in sorted(snapsizedepth):
3397 if depth == 0:
3403 if depth == 0:
3398 continue
3404 continue
3399 ui.writenoi18n(
3405 ui.writenoi18n(
3400 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3406 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3401 % ((depth,) + tuple(snapsizedepth[depth]))
3407 % ((depth,) + tuple(snapsizedepth[depth]))
3402 )
3408 )
3403 ui.writenoi18n(
3409 ui.writenoi18n(
3404 b'delta size (min/max/avg) : %d / %d / %d\n'
3410 b'delta size (min/max/avg) : %d / %d / %d\n'
3405 % tuple(deltasize)
3411 % tuple(deltasize)
3406 )
3412 )
3407
3413
3408 if numdeltas > 0:
3414 if numdeltas > 0:
3409 ui.write(b'\n')
3415 ui.write(b'\n')
3410 fmt = pcfmtstr(numdeltas)
3416 fmt = pcfmtstr(numdeltas)
3411 fmt2 = pcfmtstr(numdeltas, 4)
3417 fmt2 = pcfmtstr(numdeltas, 4)
3412 ui.writenoi18n(
3418 ui.writenoi18n(
3413 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3419 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3414 )
3420 )
3415 if numprev > 0:
3421 if numprev > 0:
3416 ui.writenoi18n(
3422 ui.writenoi18n(
3417 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3423 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3418 )
3424 )
3419 ui.writenoi18n(
3425 ui.writenoi18n(
3420 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3426 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3421 )
3427 )
3422 ui.writenoi18n(
3428 ui.writenoi18n(
3423 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3429 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3424 )
3430 )
3425 if gdelta:
3431 if gdelta:
3426 ui.writenoi18n(
3432 ui.writenoi18n(
3427 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3433 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3428 )
3434 )
3429 ui.writenoi18n(
3435 ui.writenoi18n(
3430 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3436 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3431 )
3437 )
3432 ui.writenoi18n(
3438 ui.writenoi18n(
3433 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3439 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3434 )
3440 )
3435
3441
3436
3442
3437 @command(
3443 @command(
3438 b'debugrevlogindex',
3444 b'debugrevlogindex',
3439 cmdutil.debugrevlogopts
3445 cmdutil.debugrevlogopts
3440 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3446 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3441 _(b'[-f FORMAT] -c|-m|FILE'),
3447 _(b'[-f FORMAT] -c|-m|FILE'),
3442 optionalrepo=True,
3448 optionalrepo=True,
3443 )
3449 )
3444 def debugrevlogindex(ui, repo, file_=None, **opts):
3450 def debugrevlogindex(ui, repo, file_=None, **opts):
3445 """dump the contents of a revlog index"""
3451 """dump the contents of a revlog index"""
3446 opts = pycompat.byteskwargs(opts)
3452 opts = pycompat.byteskwargs(opts)
3447 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3453 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3448 format = opts.get(b'format', 0)
3454 format = opts.get(b'format', 0)
3449 if format not in (0, 1):
3455 if format not in (0, 1):
3450 raise error.Abort(_(b"unknown format %d") % format)
3456 raise error.Abort(_(b"unknown format %d") % format)
3451
3457
3452 if ui.debugflag:
3458 if ui.debugflag:
3453 shortfn = hex
3459 shortfn = hex
3454 else:
3460 else:
3455 shortfn = short
3461 shortfn = short
3456
3462
3457 # There might not be anything in r, so have a sane default
3463 # There might not be anything in r, so have a sane default
3458 idlen = 12
3464 idlen = 12
3459 for i in r:
3465 for i in r:
3460 idlen = len(shortfn(r.node(i)))
3466 idlen = len(shortfn(r.node(i)))
3461 break
3467 break
3462
3468
3463 if format == 0:
3469 if format == 0:
3464 if ui.verbose:
3470 if ui.verbose:
3465 ui.writenoi18n(
3471 ui.writenoi18n(
3466 b" rev offset length linkrev %s %s p2\n"
3472 b" rev offset length linkrev %s %s p2\n"
3467 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3473 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3468 )
3474 )
3469 else:
3475 else:
3470 ui.writenoi18n(
3476 ui.writenoi18n(
3471 b" rev linkrev %s %s p2\n"
3477 b" rev linkrev %s %s p2\n"
3472 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3478 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3473 )
3479 )
3474 elif format == 1:
3480 elif format == 1:
3475 if ui.verbose:
3481 if ui.verbose:
3476 ui.writenoi18n(
3482 ui.writenoi18n(
3477 (
3483 (
3478 b" rev flag offset length size link p1"
3484 b" rev flag offset length size link p1"
3479 b" p2 %s\n"
3485 b" p2 %s\n"
3480 )
3486 )
3481 % b"nodeid".rjust(idlen)
3487 % b"nodeid".rjust(idlen)
3482 )
3488 )
3483 else:
3489 else:
3484 ui.writenoi18n(
3490 ui.writenoi18n(
3485 b" rev flag size link p1 p2 %s\n"
3491 b" rev flag size link p1 p2 %s\n"
3486 % b"nodeid".rjust(idlen)
3492 % b"nodeid".rjust(idlen)
3487 )
3493 )
3488
3494
3489 for i in r:
3495 for i in r:
3490 node = r.node(i)
3496 node = r.node(i)
3491 if format == 0:
3497 if format == 0:
3492 try:
3498 try:
3493 pp = r.parents(node)
3499 pp = r.parents(node)
3494 except Exception:
3500 except Exception:
3495 pp = [repo.nullid, repo.nullid]
3501 pp = [repo.nullid, repo.nullid]
3496 if ui.verbose:
3502 if ui.verbose:
3497 ui.write(
3503 ui.write(
3498 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3504 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3499 % (
3505 % (
3500 i,
3506 i,
3501 r.start(i),
3507 r.start(i),
3502 r.length(i),
3508 r.length(i),
3503 r.linkrev(i),
3509 r.linkrev(i),
3504 shortfn(node),
3510 shortfn(node),
3505 shortfn(pp[0]),
3511 shortfn(pp[0]),
3506 shortfn(pp[1]),
3512 shortfn(pp[1]),
3507 )
3513 )
3508 )
3514 )
3509 else:
3515 else:
3510 ui.write(
3516 ui.write(
3511 b"% 6d % 7d %s %s %s\n"
3517 b"% 6d % 7d %s %s %s\n"
3512 % (
3518 % (
3513 i,
3519 i,
3514 r.linkrev(i),
3520 r.linkrev(i),
3515 shortfn(node),
3521 shortfn(node),
3516 shortfn(pp[0]),
3522 shortfn(pp[0]),
3517 shortfn(pp[1]),
3523 shortfn(pp[1]),
3518 )
3524 )
3519 )
3525 )
3520 elif format == 1:
3526 elif format == 1:
3521 pr = r.parentrevs(i)
3527 pr = r.parentrevs(i)
3522 if ui.verbose:
3528 if ui.verbose:
3523 ui.write(
3529 ui.write(
3524 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3530 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3525 % (
3531 % (
3526 i,
3532 i,
3527 r.flags(i),
3533 r.flags(i),
3528 r.start(i),
3534 r.start(i),
3529 r.length(i),
3535 r.length(i),
3530 r.rawsize(i),
3536 r.rawsize(i),
3531 r.linkrev(i),
3537 r.linkrev(i),
3532 pr[0],
3538 pr[0],
3533 pr[1],
3539 pr[1],
3534 shortfn(node),
3540 shortfn(node),
3535 )
3541 )
3536 )
3542 )
3537 else:
3543 else:
3538 ui.write(
3544 ui.write(
3539 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3545 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3540 % (
3546 % (
3541 i,
3547 i,
3542 r.flags(i),
3548 r.flags(i),
3543 r.rawsize(i),
3549 r.rawsize(i),
3544 r.linkrev(i),
3550 r.linkrev(i),
3545 pr[0],
3551 pr[0],
3546 pr[1],
3552 pr[1],
3547 shortfn(node),
3553 shortfn(node),
3548 )
3554 )
3549 )
3555 )
3550
3556
3551
3557
3552 @command(
3558 @command(
3553 b'debugrevspec',
3559 b'debugrevspec',
3554 [
3560 [
3555 (
3561 (
3556 b'',
3562 b'',
3557 b'optimize',
3563 b'optimize',
3558 None,
3564 None,
3559 _(b'print parsed tree after optimizing (DEPRECATED)'),
3565 _(b'print parsed tree after optimizing (DEPRECATED)'),
3560 ),
3566 ),
3561 (
3567 (
3562 b'',
3568 b'',
3563 b'show-revs',
3569 b'show-revs',
3564 True,
3570 True,
3565 _(b'print list of result revisions (default)'),
3571 _(b'print list of result revisions (default)'),
3566 ),
3572 ),
3567 (
3573 (
3568 b's',
3574 b's',
3569 b'show-set',
3575 b'show-set',
3570 None,
3576 None,
3571 _(b'print internal representation of result set'),
3577 _(b'print internal representation of result set'),
3572 ),
3578 ),
3573 (
3579 (
3574 b'p',
3580 b'p',
3575 b'show-stage',
3581 b'show-stage',
3576 [],
3582 [],
3577 _(b'print parsed tree at the given stage'),
3583 _(b'print parsed tree at the given stage'),
3578 _(b'NAME'),
3584 _(b'NAME'),
3579 ),
3585 ),
3580 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3586 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3581 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3587 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3582 ],
3588 ],
3583 b'REVSPEC',
3589 b'REVSPEC',
3584 )
3590 )
3585 def debugrevspec(ui, repo, expr, **opts):
3591 def debugrevspec(ui, repo, expr, **opts):
3586 """parse and apply a revision specification
3592 """parse and apply a revision specification
3587
3593
3588 Use -p/--show-stage option to print the parsed tree at the given stages.
3594 Use -p/--show-stage option to print the parsed tree at the given stages.
3589 Use -p all to print tree at every stage.
3595 Use -p all to print tree at every stage.
3590
3596
3591 Use --no-show-revs option with -s or -p to print only the set
3597 Use --no-show-revs option with -s or -p to print only the set
3592 representation or the parsed tree respectively.
3598 representation or the parsed tree respectively.
3593
3599
3594 Use --verify-optimized to compare the optimized result with the unoptimized
3600 Use --verify-optimized to compare the optimized result with the unoptimized
3595 one. Returns 1 if the optimized result differs.
3601 one. Returns 1 if the optimized result differs.
3596 """
3602 """
3597 opts = pycompat.byteskwargs(opts)
3603 opts = pycompat.byteskwargs(opts)
3598 aliases = ui.configitems(b'revsetalias')
3604 aliases = ui.configitems(b'revsetalias')
3599 stages = [
3605 stages = [
3600 (b'parsed', lambda tree: tree),
3606 (b'parsed', lambda tree: tree),
3601 (
3607 (
3602 b'expanded',
3608 b'expanded',
3603 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3609 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3604 ),
3610 ),
3605 (b'concatenated', revsetlang.foldconcat),
3611 (b'concatenated', revsetlang.foldconcat),
3606 (b'analyzed', revsetlang.analyze),
3612 (b'analyzed', revsetlang.analyze),
3607 (b'optimized', revsetlang.optimize),
3613 (b'optimized', revsetlang.optimize),
3608 ]
3614 ]
3609 if opts[b'no_optimized']:
3615 if opts[b'no_optimized']:
3610 stages = stages[:-1]
3616 stages = stages[:-1]
3611 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3617 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3612 raise error.Abort(
3618 raise error.Abort(
3613 _(b'cannot use --verify-optimized with --no-optimized')
3619 _(b'cannot use --verify-optimized with --no-optimized')
3614 )
3620 )
3615 stagenames = {n for n, f in stages}
3621 stagenames = {n for n, f in stages}
3616
3622
3617 showalways = set()
3623 showalways = set()
3618 showchanged = set()
3624 showchanged = set()
3619 if ui.verbose and not opts[b'show_stage']:
3625 if ui.verbose and not opts[b'show_stage']:
3620 # show parsed tree by --verbose (deprecated)
3626 # show parsed tree by --verbose (deprecated)
3621 showalways.add(b'parsed')
3627 showalways.add(b'parsed')
3622 showchanged.update([b'expanded', b'concatenated'])
3628 showchanged.update([b'expanded', b'concatenated'])
3623 if opts[b'optimize']:
3629 if opts[b'optimize']:
3624 showalways.add(b'optimized')
3630 showalways.add(b'optimized')
3625 if opts[b'show_stage'] and opts[b'optimize']:
3631 if opts[b'show_stage'] and opts[b'optimize']:
3626 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3632 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3627 if opts[b'show_stage'] == [b'all']:
3633 if opts[b'show_stage'] == [b'all']:
3628 showalways.update(stagenames)
3634 showalways.update(stagenames)
3629 else:
3635 else:
3630 for n in opts[b'show_stage']:
3636 for n in opts[b'show_stage']:
3631 if n not in stagenames:
3637 if n not in stagenames:
3632 raise error.Abort(_(b'invalid stage name: %s') % n)
3638 raise error.Abort(_(b'invalid stage name: %s') % n)
3633 showalways.update(opts[b'show_stage'])
3639 showalways.update(opts[b'show_stage'])
3634
3640
3635 treebystage = {}
3641 treebystage = {}
3636 printedtree = None
3642 printedtree = None
3637 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3643 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3638 for n, f in stages:
3644 for n, f in stages:
3639 treebystage[n] = tree = f(tree)
3645 treebystage[n] = tree = f(tree)
3640 if n in showalways or (n in showchanged and tree != printedtree):
3646 if n in showalways or (n in showchanged and tree != printedtree):
3641 if opts[b'show_stage'] or n != b'parsed':
3647 if opts[b'show_stage'] or n != b'parsed':
3642 ui.write(b"* %s:\n" % n)
3648 ui.write(b"* %s:\n" % n)
3643 ui.write(revsetlang.prettyformat(tree), b"\n")
3649 ui.write(revsetlang.prettyformat(tree), b"\n")
3644 printedtree = tree
3650 printedtree = tree
3645
3651
3646 if opts[b'verify_optimized']:
3652 if opts[b'verify_optimized']:
3647 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3653 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3648 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3654 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3649 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3655 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3650 ui.writenoi18n(
3656 ui.writenoi18n(
3651 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3657 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3652 )
3658 )
3653 ui.writenoi18n(
3659 ui.writenoi18n(
3654 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3660 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3655 )
3661 )
3656 arevs = list(arevs)
3662 arevs = list(arevs)
3657 brevs = list(brevs)
3663 brevs = list(brevs)
3658 if arevs == brevs:
3664 if arevs == brevs:
3659 return 0
3665 return 0
3660 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3666 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3661 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3667 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3662 sm = difflib.SequenceMatcher(None, arevs, brevs)
3668 sm = difflib.SequenceMatcher(None, arevs, brevs)
3663 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3669 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3664 if tag in ('delete', 'replace'):
3670 if tag in ('delete', 'replace'):
3665 for c in arevs[alo:ahi]:
3671 for c in arevs[alo:ahi]:
3666 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3672 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3667 if tag in ('insert', 'replace'):
3673 if tag in ('insert', 'replace'):
3668 for c in brevs[blo:bhi]:
3674 for c in brevs[blo:bhi]:
3669 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3675 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3670 if tag == 'equal':
3676 if tag == 'equal':
3671 for c in arevs[alo:ahi]:
3677 for c in arevs[alo:ahi]:
3672 ui.write(b' %d\n' % c)
3678 ui.write(b' %d\n' % c)
3673 return 1
3679 return 1
3674
3680
3675 func = revset.makematcher(tree)
3681 func = revset.makematcher(tree)
3676 revs = func(repo)
3682 revs = func(repo)
3677 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3683 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3678 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3684 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3679 if not opts[b'show_revs']:
3685 if not opts[b'show_revs']:
3680 return
3686 return
3681 for c in revs:
3687 for c in revs:
3682 ui.write(b"%d\n" % c)
3688 ui.write(b"%d\n" % c)
3683
3689
3684
3690
3685 @command(
3691 @command(
3686 b'debugserve',
3692 b'debugserve',
3687 [
3693 [
3688 (
3694 (
3689 b'',
3695 b'',
3690 b'sshstdio',
3696 b'sshstdio',
3691 False,
3697 False,
3692 _(b'run an SSH server bound to process handles'),
3698 _(b'run an SSH server bound to process handles'),
3693 ),
3699 ),
3694 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3700 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3695 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3701 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3696 ],
3702 ],
3697 b'',
3703 b'',
3698 )
3704 )
3699 def debugserve(ui, repo, **opts):
3705 def debugserve(ui, repo, **opts):
3700 """run a server with advanced settings
3706 """run a server with advanced settings
3701
3707
3702 This command is similar to :hg:`serve`. It exists partially as a
3708 This command is similar to :hg:`serve`. It exists partially as a
3703 workaround to the fact that ``hg serve --stdio`` must have specific
3709 workaround to the fact that ``hg serve --stdio`` must have specific
3704 arguments for security reasons.
3710 arguments for security reasons.
3705 """
3711 """
3706 opts = pycompat.byteskwargs(opts)
3712 opts = pycompat.byteskwargs(opts)
3707
3713
3708 if not opts[b'sshstdio']:
3714 if not opts[b'sshstdio']:
3709 raise error.Abort(_(b'only --sshstdio is currently supported'))
3715 raise error.Abort(_(b'only --sshstdio is currently supported'))
3710
3716
3711 logfh = None
3717 logfh = None
3712
3718
3713 if opts[b'logiofd'] and opts[b'logiofile']:
3719 if opts[b'logiofd'] and opts[b'logiofile']:
3714 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3720 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3715
3721
3716 if opts[b'logiofd']:
3722 if opts[b'logiofd']:
3717 # Ideally we would be line buffered. But line buffering in binary
3723 # Ideally we would be line buffered. But line buffering in binary
3718 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3724 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3719 # buffering could have performance impacts. But since this isn't
3725 # buffering could have performance impacts. But since this isn't
3720 # performance critical code, it should be fine.
3726 # performance critical code, it should be fine.
3721 try:
3727 try:
3722 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3728 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3723 except OSError as e:
3729 except OSError as e:
3724 if e.errno != errno.ESPIPE:
3730 if e.errno != errno.ESPIPE:
3725 raise
3731 raise
3726 # can't seek a pipe, so `ab` mode fails on py3
3732 # can't seek a pipe, so `ab` mode fails on py3
3727 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3733 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3728 elif opts[b'logiofile']:
3734 elif opts[b'logiofile']:
3729 logfh = open(opts[b'logiofile'], b'ab', 0)
3735 logfh = open(opts[b'logiofile'], b'ab', 0)
3730
3736
3731 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3737 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3732 s.serve_forever()
3738 s.serve_forever()
3733
3739
3734
3740
3735 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3741 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3736 def debugsetparents(ui, repo, rev1, rev2=None):
3742 def debugsetparents(ui, repo, rev1, rev2=None):
3737 """manually set the parents of the current working directory (DANGEROUS)
3743 """manually set the parents of the current working directory (DANGEROUS)
3738
3744
3739 This command is not what you are looking for and should not be used. Using
3745 This command is not what you are looking for and should not be used. Using
3740 this command will most certainly results in slight corruption of the file
3746 this command will most certainly results in slight corruption of the file
3741 level histories withing your repository. DO NOT USE THIS COMMAND.
3747 level histories withing your repository. DO NOT USE THIS COMMAND.
3742
3748
3743 The command update the p1 and p2 field in the dirstate, and not touching
3749 The command update the p1 and p2 field in the dirstate, and not touching
3744 anything else. This useful for writing repository conversion tools, but
3750 anything else. This useful for writing repository conversion tools, but
3745 should be used with extreme care. For example, neither the working
3751 should be used with extreme care. For example, neither the working
3746 directory nor the dirstate is updated, so file status may be incorrect
3752 directory nor the dirstate is updated, so file status may be incorrect
3747 after running this command. Only used if you are one of the few people that
3753 after running this command. Only used if you are one of the few people that
3748 deeply unstand both conversion tools and file level histories. If you are
3754 deeply unstand both conversion tools and file level histories. If you are
3749 reading this help, you are not one of this people (most of them sailed west
3755 reading this help, you are not one of this people (most of them sailed west
3750 from Mithlond anyway.
3756 from Mithlond anyway.
3751
3757
3752 So one last time DO NOT USE THIS COMMAND.
3758 So one last time DO NOT USE THIS COMMAND.
3753
3759
3754 Returns 0 on success.
3760 Returns 0 on success.
3755 """
3761 """
3756
3762
3757 node1 = scmutil.revsingle(repo, rev1).node()
3763 node1 = scmutil.revsingle(repo, rev1).node()
3758 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3764 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3759
3765
3760 with repo.wlock():
3766 with repo.wlock():
3761 repo.setparents(node1, node2)
3767 repo.setparents(node1, node2)
3762
3768
3763
3769
3764 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3770 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3765 def debugsidedata(ui, repo, file_, rev=None, **opts):
3771 def debugsidedata(ui, repo, file_, rev=None, **opts):
3766 """dump the side data for a cl/manifest/file revision
3772 """dump the side data for a cl/manifest/file revision
3767
3773
3768 Use --verbose to dump the sidedata content."""
3774 Use --verbose to dump the sidedata content."""
3769 opts = pycompat.byteskwargs(opts)
3775 opts = pycompat.byteskwargs(opts)
3770 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3776 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3771 if rev is not None:
3777 if rev is not None:
3772 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3778 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3773 file_, rev = None, file_
3779 file_, rev = None, file_
3774 elif rev is None:
3780 elif rev is None:
3775 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3781 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3776 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3782 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3777 r = getattr(r, '_revlog', r)
3783 r = getattr(r, '_revlog', r)
3778 try:
3784 try:
3779 sidedata = r.sidedata(r.lookup(rev))
3785 sidedata = r.sidedata(r.lookup(rev))
3780 except KeyError:
3786 except KeyError:
3781 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3787 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3782 if sidedata:
3788 if sidedata:
3783 sidedata = list(sidedata.items())
3789 sidedata = list(sidedata.items())
3784 sidedata.sort()
3790 sidedata.sort()
3785 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3791 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3786 for key, value in sidedata:
3792 for key, value in sidedata:
3787 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3793 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3788 if ui.verbose:
3794 if ui.verbose:
3789 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3795 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3790
3796
3791
3797
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known TLS-capable port are accepted.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # build an SSLContext explicitly instead. Verification is deliberately
    # disabled (CERT_NONE, no hostname check) because we only need the raw
    # peer certificate so win32.checkcertificatechain() can inspect — and,
    # if needed, repair — the chain via Windows Update.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # True -> DER-encoded certificate bytes rather than a parsed dict.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call with build enabled attempts the actual repair.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3863
3869
3864
3870
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every *.hg bundle from .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize options that getremotechanges() expects but that make no
    # sense for on-disk backup bundles.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from chlist, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # A bundle whose parents were stripped cannot be opened; warn
            # and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Suppress UI chatter while probing the bundle for incoming changes.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    # Only unbundle the first backup that actually contains
                    # the requested changeset.
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the bundle's mtime header, then either
                # the bundle path (--verbose) or a one-line changeset summary.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
4005
4011
4006
4012
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the subrepository state of the given revision: each entry of
    # ctx.substate maps a path to a tuple whose first element is printed as
    # the source and second as the pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % k)
        ui.writenoi18n(b' source %s\n' % v[0])
        ui.writenoi18n(b' revision %s\n' % v[1])
4018
4024
4019
4025
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interpreter's local namespace with the ui/repo handles so
    # they are directly usable at the prompt.
    local_ns = {'ui': ui, 'repo': repo}
    code.interact(local=local_ns)
4035
4041
4036
4042
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Formatting helpers: full context -> bytes, node -> short hash.
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # One line per successors set, nodes separated by spaces; an
            # empty set still produces a (blank) line.
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4091
4097
4092
4098
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what is already cached.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            tagsnodedisplay = hex(tagsnode)
            # The cached .hgtags filenode may no longer exist in the filelog.
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        elif tagsnode is None:
            # No cache entry at all for this revision.
            tagsnodedisplay = b'missing'
        else:
            # Falsy non-None entry: the cached value is unusable.
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4111
4117
4112
4118
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # -r requires a repository even though the command itself does not.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            # An empty key or the reserved name 'ui' is rejected; raising
            # ValueError funnels both parse and validation failures into
            # the same Abort below.
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree if it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the user-supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4176
4182
4177
4183
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # ui.getpass() can return None; substitute a placeholder so the echo
    # below always prints something meaningful.
    response = ui.getpass(prompt)
    response = b"<default response>" if response is None else response
    ui.writenoi18n(b'response: %s\n' % response)
4192
4198
4193
4199
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() collected from the user.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
4206
4212
4207
4213
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-directory lock and the store lock while every
    # known cache is recomputed.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4213
4219
4214
4220
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # `optimize` is a repeatable list option; deduplicate it before
    # delegating all of the real work to the upgrade module.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4264
4270
4265
4271
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally normalize path separators to '/' per ui.slash config.
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    # Column widths are sized to the longest absolute and relative paths.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(repo.pathto(abs)) for abs in items]),
    )
    for abs in items:
        # Third column flags exact (non-pattern) matches.
        line = fmt % (
            abs,
            f(repo.pathto(abs)),
            m.exact(abs) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4292
4298
4293
4299
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        if entry.get(b'divergentnodes'):
            # Render each divergent node as "<hex> (<phase>)", space-joined,
            # with a trailing space so it concatenates cleanly below.
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in entry[b'divergentnodes']
                )
                + b' '
            )
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4311
4317
4312
4318
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise wire-protocol argument passing: forward the positional and
    # non-empty keyword arguments to the peer's debugwireargs and print the
    # result.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options (ssh, remotecmd, ...) so only
        # the command-specific arguments are forwarded.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = {}
        for k, v in opts.items():
            if v:
                args[k] = v
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        # A mismatch between the two calls indicates stream corruption.
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4343
4349
4344
4350
4345 def _parsewirelangblocks(fh):
4351 def _parsewirelangblocks(fh):
4346 activeaction = None
4352 activeaction = None
4347 blocklines = []
4353 blocklines = []
4348 lastindent = 0
4354 lastindent = 0
4349
4355
4350 for line in fh:
4356 for line in fh:
4351 line = line.rstrip()
4357 line = line.rstrip()
4352 if not line:
4358 if not line:
4353 continue
4359 continue
4354
4360
4355 if line.startswith(b'#'):
4361 if line.startswith(b'#'):
4356 continue
4362 continue
4357
4363
4358 if not line.startswith(b' '):
4364 if not line.startswith(b' '):
4359 # New block. Flush previous one.
4365 # New block. Flush previous one.
4360 if activeaction:
4366 if activeaction:
4361 yield activeaction, blocklines
4367 yield activeaction, blocklines
4362
4368
4363 activeaction = line
4369 activeaction = line
4364 blocklines = []
4370 blocklines = []
4365 lastindent = 0
4371 lastindent = 0
4366 continue
4372 continue
4367
4373
4368 # Else we start with an indent.
4374 # Else we start with an indent.
4369
4375
4370 if not activeaction:
4376 if not activeaction:
4371 raise error.Abort(_(b'indented line outside of block'))
4377 raise error.Abort(_(b'indented line outside of block'))
4372
4378
4373 indent = len(line) - len(line.lstrip())
4379 indent = len(line) - len(line.lstrip())
4374
4380
4375 # If this line is indented more than the last line, concatenate it.
4381 # If this line is indented more than the last line, concatenate it.
4376 if indent > lastindent and blocklines:
4382 if indent > lastindent and blocklines:
4377 blocklines[-1] += line.lstrip()
4383 blocklines[-1] += line.lstrip()
4378 else:
4384 else:
4379 blocklines.append(line)
4385 blocklines.append(line)
4380 lastindent = indent
4386 lastindent = indent
4381
4387
4382 # Flush last block.
4388 # Flush last block.
4383 if activeaction:
4389 if activeaction:
4384 yield activeaction, blocklines
4390 yield activeaction, blocklines
4385
4391
4386
4392
4387 @command(
4393 @command(
4388 b'debugwireproto',
4394 b'debugwireproto',
4389 [
4395 [
4390 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4396 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4391 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4397 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4392 (
4398 (
4393 b'',
4399 b'',
4394 b'noreadstderr',
4400 b'noreadstderr',
4395 False,
4401 False,
4396 _(b'do not read from stderr of the remote'),
4402 _(b'do not read from stderr of the remote'),
4397 ),
4403 ),
4398 (
4404 (
4399 b'',
4405 b'',
4400 b'nologhandshake',
4406 b'nologhandshake',
4401 False,
4407 False,
4402 _(b'do not log I/O related to the peer handshake'),
4408 _(b'do not log I/O related to the peer handshake'),
4403 ),
4409 ),
4404 ]
4410 ]
4405 + cmdutil.remoteopts,
4411 + cmdutil.remoteopts,
4406 _(b'[PATH]'),
4412 _(b'[PATH]'),
4407 optionalrepo=True,
4413 optionalrepo=True,
4408 )
4414 )
4409 def debugwireproto(ui, repo, path=None, **opts):
4415 def debugwireproto(ui, repo, path=None, **opts):
4410 """send wire protocol commands to a server
4416 """send wire protocol commands to a server
4411
4417
4412 This command can be used to issue wire protocol commands to remote
4418 This command can be used to issue wire protocol commands to remote
4413 peers and to debug the raw data being exchanged.
4419 peers and to debug the raw data being exchanged.
4414
4420
4415 ``--localssh`` will start an SSH server against the current repository
4421 ``--localssh`` will start an SSH server against the current repository
4416 and connect to that. By default, the connection will perform a handshake
4422 and connect to that. By default, the connection will perform a handshake
4417 and establish an appropriate peer instance.
4423 and establish an appropriate peer instance.
4418
4424
4419 ``--peer`` can be used to bypass the handshake protocol and construct a
4425 ``--peer`` can be used to bypass the handshake protocol and construct a
4420 peer instance using the specified class type. Valid values are ``raw``,
4426 peer instance using the specified class type. Valid values are ``raw``,
4421 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4427 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4422 don't support higher-level command actions.
4428 don't support higher-level command actions.
4423
4429
4424 ``--noreadstderr`` can be used to disable automatic reading from stderr
4430 ``--noreadstderr`` can be used to disable automatic reading from stderr
4425 of the peer (for SSH connections only). Disabling automatic reading of
4431 of the peer (for SSH connections only). Disabling automatic reading of
4426 stderr is useful for making output more deterministic.
4432 stderr is useful for making output more deterministic.
4427
4433
4428 Commands are issued via a mini language which is specified via stdin.
4434 Commands are issued via a mini language which is specified via stdin.
4429 The language consists of individual actions to perform. An action is
4435 The language consists of individual actions to perform. An action is
4430 defined by a block. A block is defined as a line with no leading
4436 defined by a block. A block is defined as a line with no leading
4431 space followed by 0 or more lines with leading space. Blocks are
4437 space followed by 0 or more lines with leading space. Blocks are
4432 effectively a high-level command with additional metadata.
4438 effectively a high-level command with additional metadata.
4433
4439
4434 Lines beginning with ``#`` are ignored.
4440 Lines beginning with ``#`` are ignored.
4435
4441
4436 The following sections denote available actions.
4442 The following sections denote available actions.
4437
4443
4438 raw
4444 raw
4439 ---
4445 ---
4440
4446
4441 Send raw data to the server.
4447 Send raw data to the server.
4442
4448
4443 The block payload contains the raw data to send as one atomic send
4449 The block payload contains the raw data to send as one atomic send
4444 operation. The data may not actually be delivered in a single system
4450 operation. The data may not actually be delivered in a single system
4445 call: it depends on the abilities of the transport being used.
4451 call: it depends on the abilities of the transport being used.
4446
4452
4447 Each line in the block is de-indented and concatenated. Then, that
4453 Each line in the block is de-indented and concatenated. Then, that
4448 value is evaluated as a Python b'' literal. This allows the use of
4454 value is evaluated as a Python b'' literal. This allows the use of
4449 backslash escaping, etc.
4455 backslash escaping, etc.
4450
4456
4451 raw+
4457 raw+
4452 ----
4458 ----
4453
4459
4454 Behaves like ``raw`` except flushes output afterwards.
4460 Behaves like ``raw`` except flushes output afterwards.
4455
4461
4456 command <X>
4462 command <X>
4457 -----------
4463 -----------
4458
4464
4459 Send a request to run a named command, whose name follows the ``command``
4465 Send a request to run a named command, whose name follows the ``command``
4460 string.
4466 string.
4461
4467
4462 Arguments to the command are defined as lines in this block. The format of
4468 Arguments to the command are defined as lines in this block. The format of
4463 each line is ``<key> <value>``. e.g.::
4469 each line is ``<key> <value>``. e.g.::
4464
4470
4465 command listkeys
4471 command listkeys
4466 namespace bookmarks
4472 namespace bookmarks
4467
4473
4468 If the value begins with ``eval:``, it will be interpreted as a Python
4474 If the value begins with ``eval:``, it will be interpreted as a Python
4469 literal expression. Otherwise values are interpreted as Python b'' literals.
4475 literal expression. Otherwise values are interpreted as Python b'' literals.
4470 This allows sending complex types and encoding special byte sequences via
4476 This allows sending complex types and encoding special byte sequences via
4471 backslash escaping.
4477 backslash escaping.
4472
4478
4473 The following arguments have special meaning:
4479 The following arguments have special meaning:
4474
4480
4475 ``PUSHFILE``
4481 ``PUSHFILE``
4476 When defined, the *push* mechanism of the peer will be used instead
4482 When defined, the *push* mechanism of the peer will be used instead
4477 of the static request-response mechanism and the content of the
4483 of the static request-response mechanism and the content of the
4478 file specified in the value of this argument will be sent as the
4484 file specified in the value of this argument will be sent as the
4479 command payload.
4485 command payload.
4480
4486
4481 This can be used to submit a local bundle file to the remote.
4487 This can be used to submit a local bundle file to the remote.
4482
4488
4483 batchbegin
4489 batchbegin
4484 ----------
4490 ----------
4485
4491
4486 Instruct the peer to begin a batched send.
4492 Instruct the peer to begin a batched send.
4487
4493
4488 All ``command`` blocks are queued for execution until the next
4494 All ``command`` blocks are queued for execution until the next
4489 ``batchsubmit`` block.
4495 ``batchsubmit`` block.
4490
4496
4491 batchsubmit
4497 batchsubmit
4492 -----------
4498 -----------
4493
4499
4494 Submit previously queued ``command`` blocks as a batch request.
4500 Submit previously queued ``command`` blocks as a batch request.
4495
4501
4496 This action MUST be paired with a ``batchbegin`` action.
4502 This action MUST be paired with a ``batchbegin`` action.
4497
4503
4498 httprequest <method> <path>
4504 httprequest <method> <path>
4499 ---------------------------
4505 ---------------------------
4500
4506
4501 (HTTP peer only)
4507 (HTTP peer only)
4502
4508
4503 Send an HTTP request to the peer.
4509 Send an HTTP request to the peer.
4504
4510
4505 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4511 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4506
4512
4507 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4513 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4508 headers to add to the request. e.g. ``Accept: foo``.
4514 headers to add to the request. e.g. ``Accept: foo``.
4509
4515
4510 The following arguments are special:
4516 The following arguments are special:
4511
4517
4512 ``BODYFILE``
4518 ``BODYFILE``
4513 The content of the file defined as the value to this argument will be
4519 The content of the file defined as the value to this argument will be
4514 transferred verbatim as the HTTP request body.
4520 transferred verbatim as the HTTP request body.
4515
4521
4516 ``frame <type> <flags> <payload>``
4522 ``frame <type> <flags> <payload>``
4517 Send a unified protocol frame as part of the request body.
4523 Send a unified protocol frame as part of the request body.
4518
4524
4519 All frames will be collected and sent as the body to the HTTP
4525 All frames will be collected and sent as the body to the HTTP
4520 request.
4526 request.
4521
4527
4522 close
4528 close
4523 -----
4529 -----
4524
4530
4525 Close the connection to the server.
4531 Close the connection to the server.
4526
4532
4527 flush
4533 flush
4528 -----
4534 -----
4529
4535
4530 Flush data written to the server.
4536 Flush data written to the server.
4531
4537
4532 readavailable
4538 readavailable
4533 -------------
4539 -------------
4534
4540
4535 Close the write end of the connection and read all available data from
4541 Close the write end of the connection and read all available data from
4536 the server.
4542 the server.
4537
4543
4538 If the connection to the server encompasses multiple pipes, we poll both
4544 If the connection to the server encompasses multiple pipes, we poll both
4539 pipes and read available data.
4545 pipes and read available data.
4540
4546
4541 readline
4547 readline
4542 --------
4548 --------
4543
4549
4544 Read a line of output from the server. If there are multiple output
4550 Read a line of output from the server. If there are multiple output
4545 pipes, reads only the main pipe.
4551 pipes, reads only the main pipe.
4546
4552
4547 ereadline
4553 ereadline
4548 ---------
4554 ---------
4549
4555
4550 Like ``readline``, but read from the stderr pipe, if available.
4556 Like ``readline``, but read from the stderr pipe, if available.
4551
4557
4552 read <X>
4558 read <X>
4553 --------
4559 --------
4554
4560
4555 ``read()`` N bytes from the server's main output pipe.
4561 ``read()`` N bytes from the server's main output pipe.
4556
4562
4557 eread <X>
4563 eread <X>
4558 ---------
4564 ---------
4559
4565
4560 ``read()`` N bytes from the server's stderr pipe, if available.
4566 ``read()`` N bytes from the server's stderr pipe, if available.
4561
4567
4562 Specifying Unified Frame-Based Protocol Frames
4568 Specifying Unified Frame-Based Protocol Frames
4563 ----------------------------------------------
4569 ----------------------------------------------
4564
4570
4565 It is possible to emit a *Unified Frame-Based Protocol* by using special
4571 It is possible to emit a *Unified Frame-Based Protocol* by using special
4566 syntax.
4572 syntax.
4567
4573
4568 A frame is composed as a type, flags, and payload. These can be parsed
4574 A frame is composed as a type, flags, and payload. These can be parsed
4569 from a string of the form:
4575 from a string of the form:
4570
4576
4571 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4577 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4572
4578
4573 ``request-id`` and ``stream-id`` are integers defining the request and
4579 ``request-id`` and ``stream-id`` are integers defining the request and
4574 stream identifiers.
4580 stream identifiers.
4575
4581
4576 ``type`` can be an integer value for the frame type or the string name
4582 ``type`` can be an integer value for the frame type or the string name
4577 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4583 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4578 ``command-name``.
4584 ``command-name``.
4579
4585
4580 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4586 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4581 components. Each component (and there can be just one) can be an integer
4587 components. Each component (and there can be just one) can be an integer
4582 or a flag name for stream flags or frame flags, respectively. Values are
4588 or a flag name for stream flags or frame flags, respectively. Values are
4583 resolved to integers and then bitwise OR'd together.
4589 resolved to integers and then bitwise OR'd together.
4584
4590
4585 ``payload`` represents the raw frame payload. If it begins with
4591 ``payload`` represents the raw frame payload. If it begins with
4586 ``cbor:``, the following string is evaluated as Python code and the
4592 ``cbor:``, the following string is evaluated as Python code and the
4587 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4593 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4588 as a Python byte string literal.
4594 as a Python byte string literal.
4589 """
4595 """
4590 opts = pycompat.byteskwargs(opts)
4596 opts = pycompat.byteskwargs(opts)
4591
4597
4592 if opts[b'localssh'] and not repo:
4598 if opts[b'localssh'] and not repo:
4593 raise error.Abort(_(b'--localssh requires a repository'))
4599 raise error.Abort(_(b'--localssh requires a repository'))
4594
4600
4595 if opts[b'peer'] and opts[b'peer'] not in (
4601 if opts[b'peer'] and opts[b'peer'] not in (
4596 b'raw',
4602 b'raw',
4597 b'ssh1',
4603 b'ssh1',
4598 ):
4604 ):
4599 raise error.Abort(
4605 raise error.Abort(
4600 _(b'invalid value for --peer'),
4606 _(b'invalid value for --peer'),
4601 hint=_(b'valid values are "raw" and "ssh1"'),
4607 hint=_(b'valid values are "raw" and "ssh1"'),
4602 )
4608 )
4603
4609
4604 if path and opts[b'localssh']:
4610 if path and opts[b'localssh']:
4605 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4611 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4606
4612
4607 if ui.interactive():
4613 if ui.interactive():
4608 ui.write(_(b'(waiting for commands on stdin)\n'))
4614 ui.write(_(b'(waiting for commands on stdin)\n'))
4609
4615
4610 blocks = list(_parsewirelangblocks(ui.fin))
4616 blocks = list(_parsewirelangblocks(ui.fin))
4611
4617
4612 proc = None
4618 proc = None
4613 stdin = None
4619 stdin = None
4614 stdout = None
4620 stdout = None
4615 stderr = None
4621 stderr = None
4616 opener = None
4622 opener = None
4617
4623
4618 if opts[b'localssh']:
4624 if opts[b'localssh']:
4619 # We start the SSH server in its own process so there is process
4625 # We start the SSH server in its own process so there is process
4620 # separation. This prevents a whole class of potential bugs around
4626 # separation. This prevents a whole class of potential bugs around
4621 # shared state from interfering with server operation.
4627 # shared state from interfering with server operation.
4622 args = procutil.hgcmd() + [
4628 args = procutil.hgcmd() + [
4623 b'-R',
4629 b'-R',
4624 repo.root,
4630 repo.root,
4625 b'debugserve',
4631 b'debugserve',
4626 b'--sshstdio',
4632 b'--sshstdio',
4627 ]
4633 ]
4628 proc = subprocess.Popen(
4634 proc = subprocess.Popen(
4629 pycompat.rapply(procutil.tonativestr, args),
4635 pycompat.rapply(procutil.tonativestr, args),
4630 stdin=subprocess.PIPE,
4636 stdin=subprocess.PIPE,
4631 stdout=subprocess.PIPE,
4637 stdout=subprocess.PIPE,
4632 stderr=subprocess.PIPE,
4638 stderr=subprocess.PIPE,
4633 bufsize=0,
4639 bufsize=0,
4634 )
4640 )
4635
4641
4636 stdin = proc.stdin
4642 stdin = proc.stdin
4637 stdout = proc.stdout
4643 stdout = proc.stdout
4638 stderr = proc.stderr
4644 stderr = proc.stderr
4639
4645
4640 # We turn the pipes into observers so we can log I/O.
4646 # We turn the pipes into observers so we can log I/O.
4641 if ui.verbose or opts[b'peer'] == b'raw':
4647 if ui.verbose or opts[b'peer'] == b'raw':
4642 stdin = util.makeloggingfileobject(
4648 stdin = util.makeloggingfileobject(
4643 ui, proc.stdin, b'i', logdata=True
4649 ui, proc.stdin, b'i', logdata=True
4644 )
4650 )
4645 stdout = util.makeloggingfileobject(
4651 stdout = util.makeloggingfileobject(
4646 ui, proc.stdout, b'o', logdata=True
4652 ui, proc.stdout, b'o', logdata=True
4647 )
4653 )
4648 stderr = util.makeloggingfileobject(
4654 stderr = util.makeloggingfileobject(
4649 ui, proc.stderr, b'e', logdata=True
4655 ui, proc.stderr, b'e', logdata=True
4650 )
4656 )
4651
4657
4652 # --localssh also implies the peer connection settings.
4658 # --localssh also implies the peer connection settings.
4653
4659
4654 url = b'ssh://localserver'
4660 url = b'ssh://localserver'
4655 autoreadstderr = not opts[b'noreadstderr']
4661 autoreadstderr = not opts[b'noreadstderr']
4656
4662
4657 if opts[b'peer'] == b'ssh1':
4663 if opts[b'peer'] == b'ssh1':
4658 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4664 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4659 peer = sshpeer.sshv1peer(
4665 peer = sshpeer.sshv1peer(
4660 ui,
4666 ui,
4661 url,
4667 url,
4662 proc,
4668 proc,
4663 stdin,
4669 stdin,
4664 stdout,
4670 stdout,
4665 stderr,
4671 stderr,
4666 None,
4672 None,
4667 autoreadstderr=autoreadstderr,
4673 autoreadstderr=autoreadstderr,
4668 )
4674 )
4669 elif opts[b'peer'] == b'raw':
4675 elif opts[b'peer'] == b'raw':
4670 ui.write(_(b'using raw connection to peer\n'))
4676 ui.write(_(b'using raw connection to peer\n'))
4671 peer = None
4677 peer = None
4672 else:
4678 else:
4673 ui.write(_(b'creating ssh peer from handshake results\n'))
4679 ui.write(_(b'creating ssh peer from handshake results\n'))
4674 peer = sshpeer.makepeer(
4680 peer = sshpeer.makepeer(
4675 ui,
4681 ui,
4676 url,
4682 url,
4677 proc,
4683 proc,
4678 stdin,
4684 stdin,
4679 stdout,
4685 stdout,
4680 stderr,
4686 stderr,
4681 autoreadstderr=autoreadstderr,
4687 autoreadstderr=autoreadstderr,
4682 )
4688 )
4683
4689
4684 elif path:
4690 elif path:
4685 # We bypass hg.peer() so we can proxy the sockets.
4691 # We bypass hg.peer() so we can proxy the sockets.
4686 # TODO consider not doing this because we skip
4692 # TODO consider not doing this because we skip
4687 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4693 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4688 u = urlutil.url(path)
4694 u = urlutil.url(path)
4689 if u.scheme != b'http':
4695 if u.scheme != b'http':
4690 raise error.Abort(_(b'only http:// paths are currently supported'))
4696 raise error.Abort(_(b'only http:// paths are currently supported'))
4691
4697
4692 url, authinfo = u.authinfo()
4698 url, authinfo = u.authinfo()
4693 openerargs = {
4699 openerargs = {
4694 'useragent': b'Mercurial debugwireproto',
4700 'useragent': b'Mercurial debugwireproto',
4695 }
4701 }
4696
4702
4697 # Turn pipes/sockets into observers so we can log I/O.
4703 # Turn pipes/sockets into observers so we can log I/O.
4698 if ui.verbose:
4704 if ui.verbose:
4699 openerargs.update(
4705 openerargs.update(
4700 {
4706 {
4701 'loggingfh': ui,
4707 'loggingfh': ui,
4702 'loggingname': b's',
4708 'loggingname': b's',
4703 'loggingopts': {
4709 'loggingopts': {
4704 'logdata': True,
4710 'logdata': True,
4705 'logdataapis': False,
4711 'logdataapis': False,
4706 },
4712 },
4707 }
4713 }
4708 )
4714 )
4709
4715
4710 if ui.debugflag:
4716 if ui.debugflag:
4711 openerargs['loggingopts']['logdataapis'] = True
4717 openerargs['loggingopts']['logdataapis'] = True
4712
4718
4713 # Don't send default headers when in raw mode. This allows us to
4719 # Don't send default headers when in raw mode. This allows us to
4714 # bypass most of the behavior of our URL handling code so we can
4720 # bypass most of the behavior of our URL handling code so we can
4715 # have near complete control over what's sent on the wire.
4721 # have near complete control over what's sent on the wire.
4716 if opts[b'peer'] == b'raw':
4722 if opts[b'peer'] == b'raw':
4717 openerargs['sendaccept'] = False
4723 openerargs['sendaccept'] = False
4718
4724
4719 opener = urlmod.opener(ui, authinfo, **openerargs)
4725 opener = urlmod.opener(ui, authinfo, **openerargs)
4720
4726
4721 if opts[b'peer'] == b'raw':
4727 if opts[b'peer'] == b'raw':
4722 ui.write(_(b'using raw connection to peer\n'))
4728 ui.write(_(b'using raw connection to peer\n'))
4723 peer = None
4729 peer = None
4724 elif opts[b'peer']:
4730 elif opts[b'peer']:
4725 raise error.Abort(
4731 raise error.Abort(
4726 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4732 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4727 )
4733 )
4728 else:
4734 else:
4729 peer = httppeer.makepeer(ui, path, opener=opener)
4735 peer = httppeer.makepeer(ui, path, opener=opener)
4730
4736
4731 # We /could/ populate stdin/stdout with sock.makefile()...
4737 # We /could/ populate stdin/stdout with sock.makefile()...
4732 else:
4738 else:
4733 raise error.Abort(_(b'unsupported connection configuration'))
4739 raise error.Abort(_(b'unsupported connection configuration'))
4734
4740
4735 batchedcommands = None
4741 batchedcommands = None
4736
4742
4737 # Now perform actions based on the parsed wire language instructions.
4743 # Now perform actions based on the parsed wire language instructions.
4738 for action, lines in blocks:
4744 for action, lines in blocks:
4739 if action in (b'raw', b'raw+'):
4745 if action in (b'raw', b'raw+'):
4740 if not stdin:
4746 if not stdin:
4741 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4747 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4742
4748
4743 # Concatenate the data together.
4749 # Concatenate the data together.
4744 data = b''.join(l.lstrip() for l in lines)
4750 data = b''.join(l.lstrip() for l in lines)
4745 data = stringutil.unescapestr(data)
4751 data = stringutil.unescapestr(data)
4746 stdin.write(data)
4752 stdin.write(data)
4747
4753
4748 if action == b'raw+':
4754 if action == b'raw+':
4749 stdin.flush()
4755 stdin.flush()
4750 elif action == b'flush':
4756 elif action == b'flush':
4751 if not stdin:
4757 if not stdin:
4752 raise error.Abort(_(b'cannot call flush on this peer'))
4758 raise error.Abort(_(b'cannot call flush on this peer'))
4753 stdin.flush()
4759 stdin.flush()
4754 elif action.startswith(b'command'):
4760 elif action.startswith(b'command'):
4755 if not peer:
4761 if not peer:
4756 raise error.Abort(
4762 raise error.Abort(
4757 _(
4763 _(
4758 b'cannot send commands unless peer instance '
4764 b'cannot send commands unless peer instance '
4759 b'is available'
4765 b'is available'
4760 )
4766 )
4761 )
4767 )
4762
4768
4763 command = action.split(b' ', 1)[1]
4769 command = action.split(b' ', 1)[1]
4764
4770
4765 args = {}
4771 args = {}
4766 for line in lines:
4772 for line in lines:
4767 # We need to allow empty values.
4773 # We need to allow empty values.
4768 fields = line.lstrip().split(b' ', 1)
4774 fields = line.lstrip().split(b' ', 1)
4769 if len(fields) == 1:
4775 if len(fields) == 1:
4770 key = fields[0]
4776 key = fields[0]
4771 value = b''
4777 value = b''
4772 else:
4778 else:
4773 key, value = fields
4779 key, value = fields
4774
4780
4775 if value.startswith(b'eval:'):
4781 if value.startswith(b'eval:'):
4776 value = stringutil.evalpythonliteral(value[5:])
4782 value = stringutil.evalpythonliteral(value[5:])
4777 else:
4783 else:
4778 value = stringutil.unescapestr(value)
4784 value = stringutil.unescapestr(value)
4779
4785
4780 args[key] = value
4786 args[key] = value
4781
4787
4782 if batchedcommands is not None:
4788 if batchedcommands is not None:
4783 batchedcommands.append((command, args))
4789 batchedcommands.append((command, args))
4784 continue
4790 continue
4785
4791
4786 ui.status(_(b'sending %s command\n') % command)
4792 ui.status(_(b'sending %s command\n') % command)
4787
4793
4788 if b'PUSHFILE' in args:
4794 if b'PUSHFILE' in args:
4789 with open(args[b'PUSHFILE'], 'rb') as fh:
4795 with open(args[b'PUSHFILE'], 'rb') as fh:
4790 del args[b'PUSHFILE']
4796 del args[b'PUSHFILE']
4791 res, output = peer._callpush(
4797 res, output = peer._callpush(
4792 command, fh, **pycompat.strkwargs(args)
4798 command, fh, **pycompat.strkwargs(args)
4793 )
4799 )
4794 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4800 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4795 ui.status(
4801 ui.status(
4796 _(b'remote output: %s\n') % stringutil.escapestr(output)
4802 _(b'remote output: %s\n') % stringutil.escapestr(output)
4797 )
4803 )
4798 else:
4804 else:
4799 with peer.commandexecutor() as e:
4805 with peer.commandexecutor() as e:
4800 res = e.callcommand(command, args).result()
4806 res = e.callcommand(command, args).result()
4801
4807
4802 ui.status(
4808 ui.status(
4803 _(b'response: %s\n')
4809 _(b'response: %s\n')
4804 % stringutil.pprint(res, bprefix=True, indent=2)
4810 % stringutil.pprint(res, bprefix=True, indent=2)
4805 )
4811 )
4806
4812
4807 elif action == b'batchbegin':
4813 elif action == b'batchbegin':
4808 if batchedcommands is not None:
4814 if batchedcommands is not None:
4809 raise error.Abort(_(b'nested batchbegin not allowed'))
4815 raise error.Abort(_(b'nested batchbegin not allowed'))
4810
4816
4811 batchedcommands = []
4817 batchedcommands = []
4812 elif action == b'batchsubmit':
4818 elif action == b'batchsubmit':
4813 # There is a batching API we could go through. But it would be
4819 # There is a batching API we could go through. But it would be
4814 # difficult to normalize requests into function calls. It is easier
4820 # difficult to normalize requests into function calls. It is easier
4815 # to bypass this layer and normalize to commands + args.
4821 # to bypass this layer and normalize to commands + args.
4816 ui.status(
4822 ui.status(
4817 _(b'sending batch with %d sub-commands\n')
4823 _(b'sending batch with %d sub-commands\n')
4818 % len(batchedcommands)
4824 % len(batchedcommands)
4819 )
4825 )
4820 assert peer is not None
4826 assert peer is not None
4821 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4827 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4822 ui.status(
4828 ui.status(
4823 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4829 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4824 )
4830 )
4825
4831
4826 batchedcommands = None
4832 batchedcommands = None
4827
4833
4828 elif action.startswith(b'httprequest '):
4834 elif action.startswith(b'httprequest '):
4829 if not opener:
4835 if not opener:
4830 raise error.Abort(
4836 raise error.Abort(
4831 _(b'cannot use httprequest without an HTTP peer')
4837 _(b'cannot use httprequest without an HTTP peer')
4832 )
4838 )
4833
4839
4834 request = action.split(b' ', 2)
4840 request = action.split(b' ', 2)
4835 if len(request) != 3:
4841 if len(request) != 3:
4836 raise error.Abort(
4842 raise error.Abort(
4837 _(
4843 _(
4838 b'invalid httprequest: expected format is '
4844 b'invalid httprequest: expected format is '
4839 b'"httprequest <method> <path>'
4845 b'"httprequest <method> <path>'
4840 )
4846 )
4841 )
4847 )
4842
4848
4843 method, httppath = request[1:]
4849 method, httppath = request[1:]
4844 headers = {}
4850 headers = {}
4845 body = None
4851 body = None
4846 frames = []
4852 frames = []
4847 for line in lines:
4853 for line in lines:
4848 line = line.lstrip()
4854 line = line.lstrip()
4849 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4855 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4850 if m:
4856 if m:
4851 # Headers need to use native strings.
4857 # Headers need to use native strings.
4852 key = pycompat.strurl(m.group(1))
4858 key = pycompat.strurl(m.group(1))
4853 value = pycompat.strurl(m.group(2))
4859 value = pycompat.strurl(m.group(2))
4854 headers[key] = value
4860 headers[key] = value
4855 continue
4861 continue
4856
4862
4857 if line.startswith(b'BODYFILE '):
4863 if line.startswith(b'BODYFILE '):
4858 with open(line.split(b' ', 1), b'rb') as fh:
4864 with open(line.split(b' ', 1), b'rb') as fh:
4859 body = fh.read()
4865 body = fh.read()
4860 elif line.startswith(b'frame '):
4866 elif line.startswith(b'frame '):
4861 frame = wireprotoframing.makeframefromhumanstring(
4867 frame = wireprotoframing.makeframefromhumanstring(
4862 line[len(b'frame ') :]
4868 line[len(b'frame ') :]
4863 )
4869 )
4864
4870
4865 frames.append(frame)
4871 frames.append(frame)
4866 else:
4872 else:
4867 raise error.Abort(
4873 raise error.Abort(
4868 _(b'unknown argument to httprequest: %s') % line
4874 _(b'unknown argument to httprequest: %s') % line
4869 )
4875 )
4870
4876
4871 url = path + httppath
4877 url = path + httppath
4872
4878
4873 if frames:
4879 if frames:
4874 body = b''.join(bytes(f) for f in frames)
4880 body = b''.join(bytes(f) for f in frames)
4875
4881
4876 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4882 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4877
4883
4878 # urllib.Request insists on using has_data() as a proxy for
4884 # urllib.Request insists on using has_data() as a proxy for
4879 # determining the request method. Override that to use our
4885 # determining the request method. Override that to use our
4880 # explicitly requested method.
4886 # explicitly requested method.
4881 req.get_method = lambda: pycompat.sysstr(method)
4887 req.get_method = lambda: pycompat.sysstr(method)
4882
4888
4883 try:
4889 try:
4884 res = opener.open(req)
4890 res = opener.open(req)
4885 body = res.read()
4891 body = res.read()
4886 except util.urlerr.urlerror as e:
4892 except util.urlerr.urlerror as e:
4887 # read() method must be called, but only exists in Python 2
4893 # read() method must be called, but only exists in Python 2
4888 getattr(e, 'read', lambda: None)()
4894 getattr(e, 'read', lambda: None)()
4889 continue
4895 continue
4890
4896
4891 ct = res.headers.get('Content-Type')
4897 ct = res.headers.get('Content-Type')
4892 if ct == 'application/mercurial-cbor':
4898 if ct == 'application/mercurial-cbor':
4893 ui.write(
4899 ui.write(
4894 _(b'cbor> %s\n')
4900 _(b'cbor> %s\n')
4895 % stringutil.pprint(
4901 % stringutil.pprint(
4896 cborutil.decodeall(body), bprefix=True, indent=2
4902 cborutil.decodeall(body), bprefix=True, indent=2
4897 )
4903 )
4898 )
4904 )
4899
4905
4900 elif action == b'close':
4906 elif action == b'close':
4901 assert peer is not None
4907 assert peer is not None
4902 peer.close()
4908 peer.close()
4903 elif action == b'readavailable':
4909 elif action == b'readavailable':
4904 if not stdout or not stderr:
4910 if not stdout or not stderr:
4905 raise error.Abort(
4911 raise error.Abort(
4906 _(b'readavailable not available on this peer')
4912 _(b'readavailable not available on this peer')
4907 )
4913 )
4908
4914
4909 stdin.close()
4915 stdin.close()
4910 stdout.read()
4916 stdout.read()
4911 stderr.read()
4917 stderr.read()
4912
4918
4913 elif action == b'readline':
4919 elif action == b'readline':
4914 if not stdout:
4920 if not stdout:
4915 raise error.Abort(_(b'readline not available on this peer'))
4921 raise error.Abort(_(b'readline not available on this peer'))
4916 stdout.readline()
4922 stdout.readline()
4917 elif action == b'ereadline':
4923 elif action == b'ereadline':
4918 if not stderr:
4924 if not stderr:
4919 raise error.Abort(_(b'ereadline not available on this peer'))
4925 raise error.Abort(_(b'ereadline not available on this peer'))
4920 stderr.readline()
4926 stderr.readline()
4921 elif action.startswith(b'read '):
4927 elif action.startswith(b'read '):
4922 count = int(action.split(b' ', 1)[1])
4928 count = int(action.split(b' ', 1)[1])
4923 if not stdout:
4929 if not stdout:
4924 raise error.Abort(_(b'read not available on this peer'))
4930 raise error.Abort(_(b'read not available on this peer'))
4925 stdout.read(count)
4931 stdout.read(count)
4926 elif action.startswith(b'eread '):
4932 elif action.startswith(b'eread '):
4927 count = int(action.split(b' ', 1)[1])
4933 count = int(action.split(b' ', 1)[1])
4928 if not stderr:
4934 if not stderr:
4929 raise error.Abort(_(b'eread not available on this peer'))
4935 raise error.Abort(_(b'eread not available on this peer'))
4930 stderr.read(count)
4936 stderr.read(count)
4931 else:
4937 else:
4932 raise error.Abort(_(b'unknown action: %s') % action)
4938 raise error.Abort(_(b'unknown action: %s') % action)
4933
4939
4934 if batchedcommands is not None:
4940 if batchedcommands is not None:
4935 raise error.Abort(_(b'unclosed "batchbegin" request'))
4941 raise error.Abort(_(b'unclosed "batchbegin" request'))
4936
4942
4937 if peer:
4943 if peer:
4938 peer.close()
4944 peer.close()
4939
4945
4940 if proc:
4946 if proc:
4941 proc.kill()
4947 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now