##// END OF EJS Templates
debug-discovery: do not abort on unrelated repositories...
marmoute -
r50298:ac4fda5d stable
parent child Browse files
Show More
@@ -1,5051 +1,5056 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revlogutils,
76 revlogutils,
77 revset,
77 revset,
78 revsetlang,
78 revsetlang,
79 scmutil,
79 scmutil,
80 setdiscovery,
80 setdiscovery,
81 simplemerge,
81 simplemerge,
82 sshpeer,
82 sshpeer,
83 sslutil,
83 sslutil,
84 streamclone,
84 streamclone,
85 strip,
85 strip,
86 tags as tagsmod,
86 tags as tagsmod,
87 templater,
87 templater,
88 treediscovery,
88 treediscovery,
89 upgrade,
89 upgrade,
90 url as urlmod,
90 url as urlmod,
91 util,
91 util,
92 vfs as vfsmod,
92 vfs as vfsmod,
93 wireprotoframing,
93 wireprotoframing,
94 wireprotoserver,
94 wireprotoserver,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .utils import (
97 from .utils import (
98 cborutil,
98 cborutil,
99 compression,
99 compression,
100 dateutil,
100 dateutil,
101 procutil,
101 procutil,
102 stringutil,
102 stringutil,
103 urlutil,
103 urlutil,
104 )
104 )
105
105
106 from .revlogutils import (
106 from .revlogutils import (
107 constants as revlog_constants,
107 constants as revlog_constants,
108 debug as revlog_debug,
108 debug as revlog_debug,
109 deltas as deltautil,
109 deltas as deltautil,
110 nodemap,
110 nodemap,
111 rewrite,
111 rewrite,
112 sidedata,
112 sidedata,
113 )
113 )
114
114
115 release = lockmod.release
115 release = lockmod.release
116
116
117 table = {}
117 table = {}
118 table.update(strip.command._table)
118 table.update(strip.command._table)
119 command = registrar.command(table)
119 command = registrar.command(table)
120
120
121
121
122 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
122 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
123 def debugancestor(ui, repo, *args):
123 def debugancestor(ui, repo, *args):
124 """find the ancestor revision of two revisions in a given index"""
124 """find the ancestor revision of two revisions in a given index"""
125 if len(args) == 3:
125 if len(args) == 3:
126 index, rev1, rev2 = args
126 index, rev1, rev2 = args
127 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
127 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
128 lookup = r.lookup
128 lookup = r.lookup
129 elif len(args) == 2:
129 elif len(args) == 2:
130 if not repo:
130 if not repo:
131 raise error.Abort(
131 raise error.Abort(
132 _(b'there is no Mercurial repository here (.hg not found)')
132 _(b'there is no Mercurial repository here (.hg not found)')
133 )
133 )
134 rev1, rev2 = args
134 rev1, rev2 = args
135 r = repo.changelog
135 r = repo.changelog
136 lookup = repo.lookup
136 lookup = repo.lookup
137 else:
137 else:
138 raise error.Abort(_(b'either two or three arguments required'))
138 raise error.Abort(_(b'either two or three arguments required'))
139 a = r.ancestor(lookup(rev1), lookup(rev2))
139 a = r.ancestor(lookup(rev1), lookup(rev2))
140 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
140 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
141
141
142
142
143 @command(b'debugantivirusrunning', [])
143 @command(b'debugantivirusrunning', [])
144 def debugantivirusrunning(ui, repo):
144 def debugantivirusrunning(ui, repo):
145 """attempt to trigger an antivirus scanner to see if one is active"""
145 """attempt to trigger an antivirus scanner to see if one is active"""
146 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
146 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
147 f.write(
147 f.write(
148 util.b85decode(
148 util.b85decode(
149 # This is a base85-armored version of the EICAR test file. See
149 # This is a base85-armored version of the EICAR test file. See
150 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
150 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
151 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
151 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
152 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
152 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
153 )
153 )
154 )
154 )
155 # Give an AV engine time to scan the file.
155 # Give an AV engine time to scan the file.
156 time.sleep(2)
156 time.sleep(2)
157 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
157 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158
158
159
159
160 @command(b'debugapplystreamclonebundle', [], b'FILE')
160 @command(b'debugapplystreamclonebundle', [], b'FILE')
161 def debugapplystreamclonebundle(ui, repo, fname):
161 def debugapplystreamclonebundle(ui, repo, fname):
162 """apply a stream clone bundle file"""
162 """apply a stream clone bundle file"""
163 f = hg.openpath(ui, fname)
163 f = hg.openpath(ui, fname)
164 gen = exchange.readbundle(ui, f, fname)
164 gen = exchange.readbundle(ui, f, fname)
165 gen.apply(repo)
165 gen.apply(repo)
166
166
167
167
168 @command(
168 @command(
169 b'debugbuilddag',
169 b'debugbuilddag',
170 [
170 [
171 (
171 (
172 b'm',
172 b'm',
173 b'mergeable-file',
173 b'mergeable-file',
174 None,
174 None,
175 _(b'add single file mergeable changes'),
175 _(b'add single file mergeable changes'),
176 ),
176 ),
177 (
177 (
178 b'o',
178 b'o',
179 b'overwritten-file',
179 b'overwritten-file',
180 None,
180 None,
181 _(b'add single file all revs overwrite'),
181 _(b'add single file all revs overwrite'),
182 ),
182 ),
183 (b'n', b'new-file', None, _(b'add new file at each rev')),
183 (b'n', b'new-file', None, _(b'add new file at each rev')),
184 (
184 (
185 b'',
185 b'',
186 b'from-existing',
186 b'from-existing',
187 None,
187 None,
188 _(b'continue from a non-empty repository'),
188 _(b'continue from a non-empty repository'),
189 ),
189 ),
190 ],
190 ],
191 _(b'[OPTION]... [TEXT]'),
191 _(b'[OPTION]... [TEXT]'),
192 )
192 )
193 def debugbuilddag(
193 def debugbuilddag(
194 ui,
194 ui,
195 repo,
195 repo,
196 text=None,
196 text=None,
197 mergeable_file=False,
197 mergeable_file=False,
198 overwritten_file=False,
198 overwritten_file=False,
199 new_file=False,
199 new_file=False,
200 from_existing=False,
200 from_existing=False,
201 ):
201 ):
202 """builds a repo with a given DAG from scratch in the current empty repo
202 """builds a repo with a given DAG from scratch in the current empty repo
203
203
204 The description of the DAG is read from stdin if not given on the
204 The description of the DAG is read from stdin if not given on the
205 command line.
205 command line.
206
206
207 Elements:
207 Elements:
208
208
209 - "+n" is a linear run of n nodes based on the current default parent
209 - "+n" is a linear run of n nodes based on the current default parent
210 - "." is a single node based on the current default parent
210 - "." is a single node based on the current default parent
211 - "$" resets the default parent to null (implied at the start);
211 - "$" resets the default parent to null (implied at the start);
212 otherwise the default parent is always the last node created
212 otherwise the default parent is always the last node created
213 - "<p" sets the default parent to the backref p
213 - "<p" sets the default parent to the backref p
214 - "*p" is a fork at parent p, which is a backref
214 - "*p" is a fork at parent p, which is a backref
215 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
215 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
216 - "/p2" is a merge of the preceding node and p2
216 - "/p2" is a merge of the preceding node and p2
217 - ":tag" defines a local tag for the preceding node
217 - ":tag" defines a local tag for the preceding node
218 - "@branch" sets the named branch for subsequent nodes
218 - "@branch" sets the named branch for subsequent nodes
219 - "#...\\n" is a comment up to the end of the line
219 - "#...\\n" is a comment up to the end of the line
220
220
221 Whitespace between the above elements is ignored.
221 Whitespace between the above elements is ignored.
222
222
223 A backref is either
223 A backref is either
224
224
225 - a number n, which references the node curr-n, where curr is the current
225 - a number n, which references the node curr-n, where curr is the current
226 node, or
226 node, or
227 - the name of a local tag you placed earlier using ":tag", or
227 - the name of a local tag you placed earlier using ":tag", or
228 - empty to denote the default parent.
228 - empty to denote the default parent.
229
229
230 All string valued-elements are either strictly alphanumeric, or must
230 All string valued-elements are either strictly alphanumeric, or must
231 be enclosed in double quotes ("..."), with "\\" as escape character.
231 be enclosed in double quotes ("..."), with "\\" as escape character.
232 """
232 """
233
233
234 if text is None:
234 if text is None:
235 ui.status(_(b"reading DAG from stdin\n"))
235 ui.status(_(b"reading DAG from stdin\n"))
236 text = ui.fin.read()
236 text = ui.fin.read()
237
237
238 cl = repo.changelog
238 cl = repo.changelog
239 if len(cl) > 0 and not from_existing:
239 if len(cl) > 0 and not from_existing:
240 raise error.Abort(_(b'repository is not empty'))
240 raise error.Abort(_(b'repository is not empty'))
241
241
242 # determine number of revs in DAG
242 # determine number of revs in DAG
243 total = 0
243 total = 0
244 for type, data in dagparser.parsedag(text):
244 for type, data in dagparser.parsedag(text):
245 if type == b'n':
245 if type == b'n':
246 total += 1
246 total += 1
247
247
248 if mergeable_file:
248 if mergeable_file:
249 linesperrev = 2
249 linesperrev = 2
250 # make a file with k lines per rev
250 # make a file with k lines per rev
251 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
251 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
252 initialmergedlines.append(b"")
252 initialmergedlines.append(b"")
253
253
254 tags = []
254 tags = []
255 progress = ui.makeprogress(
255 progress = ui.makeprogress(
256 _(b'building'), unit=_(b'revisions'), total=total
256 _(b'building'), unit=_(b'revisions'), total=total
257 )
257 )
258 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
258 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
259 at = -1
259 at = -1
260 atbranch = b'default'
260 atbranch = b'default'
261 nodeids = []
261 nodeids = []
262 id = 0
262 id = 0
263 progress.update(id)
263 progress.update(id)
264 for type, data in dagparser.parsedag(text):
264 for type, data in dagparser.parsedag(text):
265 if type == b'n':
265 if type == b'n':
266 ui.note((b'node %s\n' % pycompat.bytestr(data)))
266 ui.note((b'node %s\n' % pycompat.bytestr(data)))
267 id, ps = data
267 id, ps = data
268
268
269 files = []
269 files = []
270 filecontent = {}
270 filecontent = {}
271
271
272 p2 = None
272 p2 = None
273 if mergeable_file:
273 if mergeable_file:
274 fn = b"mf"
274 fn = b"mf"
275 p1 = repo[ps[0]]
275 p1 = repo[ps[0]]
276 if len(ps) > 1:
276 if len(ps) > 1:
277 p2 = repo[ps[1]]
277 p2 = repo[ps[1]]
278 pa = p1.ancestor(p2)
278 pa = p1.ancestor(p2)
279 base, local, other = [
279 base, local, other = [
280 x[fn].data() for x in (pa, p1, p2)
280 x[fn].data() for x in (pa, p1, p2)
281 ]
281 ]
282 m3 = simplemerge.Merge3Text(base, local, other)
282 m3 = simplemerge.Merge3Text(base, local, other)
283 ml = [
283 ml = [
284 l.strip()
284 l.strip()
285 for l in simplemerge.render_minimized(m3)[0]
285 for l in simplemerge.render_minimized(m3)[0]
286 ]
286 ]
287 ml.append(b"")
287 ml.append(b"")
288 elif at > 0:
288 elif at > 0:
289 ml = p1[fn].data().split(b"\n")
289 ml = p1[fn].data().split(b"\n")
290 else:
290 else:
291 ml = initialmergedlines
291 ml = initialmergedlines
292 ml[id * linesperrev] += b" r%i" % id
292 ml[id * linesperrev] += b" r%i" % id
293 mergedtext = b"\n".join(ml)
293 mergedtext = b"\n".join(ml)
294 files.append(fn)
294 files.append(fn)
295 filecontent[fn] = mergedtext
295 filecontent[fn] = mergedtext
296
296
297 if overwritten_file:
297 if overwritten_file:
298 fn = b"of"
298 fn = b"of"
299 files.append(fn)
299 files.append(fn)
300 filecontent[fn] = b"r%i\n" % id
300 filecontent[fn] = b"r%i\n" % id
301
301
302 if new_file:
302 if new_file:
303 fn = b"nf%i" % id
303 fn = b"nf%i" % id
304 files.append(fn)
304 files.append(fn)
305 filecontent[fn] = b"r%i\n" % id
305 filecontent[fn] = b"r%i\n" % id
306 if len(ps) > 1:
306 if len(ps) > 1:
307 if not p2:
307 if not p2:
308 p2 = repo[ps[1]]
308 p2 = repo[ps[1]]
309 for fn in p2:
309 for fn in p2:
310 if fn.startswith(b"nf"):
310 if fn.startswith(b"nf"):
311 files.append(fn)
311 files.append(fn)
312 filecontent[fn] = p2[fn].data()
312 filecontent[fn] = p2[fn].data()
313
313
314 def fctxfn(repo, cx, path):
314 def fctxfn(repo, cx, path):
315 if path in filecontent:
315 if path in filecontent:
316 return context.memfilectx(
316 return context.memfilectx(
317 repo, cx, path, filecontent[path]
317 repo, cx, path, filecontent[path]
318 )
318 )
319 return None
319 return None
320
320
321 if len(ps) == 0 or ps[0] < 0:
321 if len(ps) == 0 or ps[0] < 0:
322 pars = [None, None]
322 pars = [None, None]
323 elif len(ps) == 1:
323 elif len(ps) == 1:
324 pars = [nodeids[ps[0]], None]
324 pars = [nodeids[ps[0]], None]
325 else:
325 else:
326 pars = [nodeids[p] for p in ps]
326 pars = [nodeids[p] for p in ps]
327 cx = context.memctx(
327 cx = context.memctx(
328 repo,
328 repo,
329 pars,
329 pars,
330 b"r%i" % id,
330 b"r%i" % id,
331 files,
331 files,
332 fctxfn,
332 fctxfn,
333 date=(id, 0),
333 date=(id, 0),
334 user=b"debugbuilddag",
334 user=b"debugbuilddag",
335 extra={b'branch': atbranch},
335 extra={b'branch': atbranch},
336 )
336 )
337 nodeid = repo.commitctx(cx)
337 nodeid = repo.commitctx(cx)
338 nodeids.append(nodeid)
338 nodeids.append(nodeid)
339 at = id
339 at = id
340 elif type == b'l':
340 elif type == b'l':
341 id, name = data
341 id, name = data
342 ui.note((b'tag %s\n' % name))
342 ui.note((b'tag %s\n' % name))
343 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
343 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
344 elif type == b'a':
344 elif type == b'a':
345 ui.note((b'branch %s\n' % data))
345 ui.note((b'branch %s\n' % data))
346 atbranch = data
346 atbranch = data
347 progress.update(id)
347 progress.update(id)
348
348
349 if tags:
349 if tags:
350 repo.vfs.write(b"localtags", b"".join(tags))
350 repo.vfs.write(b"localtags", b"".join(tags))
351
351
352
352
353 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
353 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
354 indent_string = b' ' * indent
354 indent_string = b' ' * indent
355 if all:
355 if all:
356 ui.writenoi18n(
356 ui.writenoi18n(
357 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
357 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
358 % indent_string
358 % indent_string
359 )
359 )
360
360
361 def showchunks(named):
361 def showchunks(named):
362 ui.write(b"\n%s%s\n" % (indent_string, named))
362 ui.write(b"\n%s%s\n" % (indent_string, named))
363 for deltadata in gen.deltaiter():
363 for deltadata in gen.deltaiter():
364 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
364 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
365 ui.write(
365 ui.write(
366 b"%s%s %s %s %s %s %d\n"
366 b"%s%s %s %s %s %s %d\n"
367 % (
367 % (
368 indent_string,
368 indent_string,
369 hex(node),
369 hex(node),
370 hex(p1),
370 hex(p1),
371 hex(p2),
371 hex(p2),
372 hex(cs),
372 hex(cs),
373 hex(deltabase),
373 hex(deltabase),
374 len(delta),
374 len(delta),
375 )
375 )
376 )
376 )
377
377
378 gen.changelogheader()
378 gen.changelogheader()
379 showchunks(b"changelog")
379 showchunks(b"changelog")
380 gen.manifestheader()
380 gen.manifestheader()
381 showchunks(b"manifest")
381 showchunks(b"manifest")
382 for chunkdata in iter(gen.filelogheader, {}):
382 for chunkdata in iter(gen.filelogheader, {}):
383 fname = chunkdata[b'filename']
383 fname = chunkdata[b'filename']
384 showchunks(fname)
384 showchunks(fname)
385 else:
385 else:
386 if isinstance(gen, bundle2.unbundle20):
386 if isinstance(gen, bundle2.unbundle20):
387 raise error.Abort(_(b'use debugbundle2 for this file'))
387 raise error.Abort(_(b'use debugbundle2 for this file'))
388 gen.changelogheader()
388 gen.changelogheader()
389 for deltadata in gen.deltaiter():
389 for deltadata in gen.deltaiter():
390 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
390 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
391 ui.write(b"%s%s\n" % (indent_string, hex(node)))
391 ui.write(b"%s%s\n" % (indent_string, hex(node)))
392
392
393
393
394 def _debugobsmarkers(ui, part, indent=0, **opts):
394 def _debugobsmarkers(ui, part, indent=0, **opts):
395 """display version and markers contained in 'data'"""
395 """display version and markers contained in 'data'"""
396 opts = pycompat.byteskwargs(opts)
396 opts = pycompat.byteskwargs(opts)
397 data = part.read()
397 data = part.read()
398 indent_string = b' ' * indent
398 indent_string = b' ' * indent
399 try:
399 try:
400 version, markers = obsolete._readmarkers(data)
400 version, markers = obsolete._readmarkers(data)
401 except error.UnknownVersion as exc:
401 except error.UnknownVersion as exc:
402 msg = b"%sunsupported version: %s (%d bytes)\n"
402 msg = b"%sunsupported version: %s (%d bytes)\n"
403 msg %= indent_string, exc.version, len(data)
403 msg %= indent_string, exc.version, len(data)
404 ui.write(msg)
404 ui.write(msg)
405 else:
405 else:
406 msg = b"%sversion: %d (%d bytes)\n"
406 msg = b"%sversion: %d (%d bytes)\n"
407 msg %= indent_string, version, len(data)
407 msg %= indent_string, version, len(data)
408 ui.write(msg)
408 ui.write(msg)
409 fm = ui.formatter(b'debugobsolete', opts)
409 fm = ui.formatter(b'debugobsolete', opts)
410 for rawmarker in sorted(markers):
410 for rawmarker in sorted(markers):
411 m = obsutil.marker(None, rawmarker)
411 m = obsutil.marker(None, rawmarker)
412 fm.startitem()
412 fm.startitem()
413 fm.plain(indent_string)
413 fm.plain(indent_string)
414 cmdutil.showmarker(fm, m)
414 cmdutil.showmarker(fm, m)
415 fm.end()
415 fm.end()
416
416
417
417
418 def _debugphaseheads(ui, data, indent=0):
418 def _debugphaseheads(ui, data, indent=0):
419 """display version and markers contained in 'data'"""
419 """display version and markers contained in 'data'"""
420 indent_string = b' ' * indent
420 indent_string = b' ' * indent
421 headsbyphase = phases.binarydecode(data)
421 headsbyphase = phases.binarydecode(data)
422 for phase in phases.allphases:
422 for phase in phases.allphases:
423 for head in headsbyphase[phase]:
423 for head in headsbyphase[phase]:
424 ui.write(indent_string)
424 ui.write(indent_string)
425 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
425 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426
426
427
427
428 def _quasirepr(thing):
428 def _quasirepr(thing):
429 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
429 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
430 return b'{%s}' % (
430 return b'{%s}' % (
431 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
431 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
432 )
432 )
433 return pycompat.bytestr(repr(thing))
433 return pycompat.bytestr(repr(thing))
434
434
435
435
436 def _debugbundle2(ui, gen, all=None, **opts):
436 def _debugbundle2(ui, gen, all=None, **opts):
437 """lists the contents of a bundle2"""
437 """lists the contents of a bundle2"""
438 if not isinstance(gen, bundle2.unbundle20):
438 if not isinstance(gen, bundle2.unbundle20):
439 raise error.Abort(_(b'not a bundle2 file'))
439 raise error.Abort(_(b'not a bundle2 file'))
440 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
440 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
441 parttypes = opts.get('part_type', [])
441 parttypes = opts.get('part_type', [])
442 for part in gen.iterparts():
442 for part in gen.iterparts():
443 if parttypes and part.type not in parttypes:
443 if parttypes and part.type not in parttypes:
444 continue
444 continue
445 msg = b'%s -- %s (mandatory: %r)\n'
445 msg = b'%s -- %s (mandatory: %r)\n'
446 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
446 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
447 if part.type == b'changegroup':
447 if part.type == b'changegroup':
448 version = part.params.get(b'version', b'01')
448 version = part.params.get(b'version', b'01')
449 cg = changegroup.getunbundler(version, part, b'UN')
449 cg = changegroup.getunbundler(version, part, b'UN')
450 if not ui.quiet:
450 if not ui.quiet:
451 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
451 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
452 if part.type == b'obsmarkers':
452 if part.type == b'obsmarkers':
453 if not ui.quiet:
453 if not ui.quiet:
454 _debugobsmarkers(ui, part, indent=4, **opts)
454 _debugobsmarkers(ui, part, indent=4, **opts)
455 if part.type == b'phase-heads':
455 if part.type == b'phase-heads':
456 if not ui.quiet:
456 if not ui.quiet:
457 _debugphaseheads(ui, part, indent=4)
457 _debugphaseheads(ui, part, indent=4)
458
458
459
459
460 @command(
460 @command(
461 b'debugbundle',
461 b'debugbundle',
462 [
462 [
463 (b'a', b'all', None, _(b'show all details')),
463 (b'a', b'all', None, _(b'show all details')),
464 (b'', b'part-type', [], _(b'show only the named part type')),
464 (b'', b'part-type', [], _(b'show only the named part type')),
465 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
465 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
466 ],
466 ],
467 _(b'FILE'),
467 _(b'FILE'),
468 norepo=True,
468 norepo=True,
469 )
469 )
470 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
470 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
471 """lists the contents of a bundle"""
471 """lists the contents of a bundle"""
472 with hg.openpath(ui, bundlepath) as f:
472 with hg.openpath(ui, bundlepath) as f:
473 if spec:
473 if spec:
474 spec = exchange.getbundlespec(ui, f)
474 spec = exchange.getbundlespec(ui, f)
475 ui.write(b'%s\n' % spec)
475 ui.write(b'%s\n' % spec)
476 return
476 return
477
477
478 gen = exchange.readbundle(ui, f, bundlepath)
478 gen = exchange.readbundle(ui, f, bundlepath)
479 if isinstance(gen, bundle2.unbundle20):
479 if isinstance(gen, bundle2.unbundle20):
480 return _debugbundle2(ui, gen, all=all, **opts)
480 return _debugbundle2(ui, gen, all=all, **opts)
481 _debugchangegroup(ui, gen, all=all, **opts)
481 _debugchangegroup(ui, gen, all=all, **opts)
482
482
483
483
484 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
484 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
485 def debugcapabilities(ui, path, **opts):
485 def debugcapabilities(ui, path, **opts):
486 """lists the capabilities of a remote peer"""
486 """lists the capabilities of a remote peer"""
487 opts = pycompat.byteskwargs(opts)
487 opts = pycompat.byteskwargs(opts)
488 peer = hg.peer(ui, opts, path)
488 peer = hg.peer(ui, opts, path)
489 try:
489 try:
490 caps = peer.capabilities()
490 caps = peer.capabilities()
491 ui.writenoi18n(b'Main capabilities:\n')
491 ui.writenoi18n(b'Main capabilities:\n')
492 for c in sorted(caps):
492 for c in sorted(caps):
493 ui.write(b' %s\n' % c)
493 ui.write(b' %s\n' % c)
494 b2caps = bundle2.bundle2caps(peer)
494 b2caps = bundle2.bundle2caps(peer)
495 if b2caps:
495 if b2caps:
496 ui.writenoi18n(b'Bundle2 capabilities:\n')
496 ui.writenoi18n(b'Bundle2 capabilities:\n')
497 for key, values in sorted(b2caps.items()):
497 for key, values in sorted(b2caps.items()):
498 ui.write(b' %s\n' % key)
498 ui.write(b' %s\n' % key)
499 for v in values:
499 for v in values:
500 ui.write(b' %s\n' % v)
500 ui.write(b' %s\n' % v)
501 finally:
501 finally:
502 peer.close()
502 peer.close()
503
503
504
504
505 @command(
505 @command(
506 b'debugchangedfiles',
506 b'debugchangedfiles',
507 [
507 [
508 (
508 (
509 b'',
509 b'',
510 b'compute',
510 b'compute',
511 False,
511 False,
512 b"compute information instead of reading it from storage",
512 b"compute information instead of reading it from storage",
513 ),
513 ),
514 ],
514 ],
515 b'REV',
515 b'REV',
516 )
516 )
517 def debugchangedfiles(ui, repo, rev, **opts):
517 def debugchangedfiles(ui, repo, rev, **opts):
518 """list the stored files changes for a revision"""
518 """list the stored files changes for a revision"""
519 ctx = logcmdutil.revsingle(repo, rev, None)
519 ctx = logcmdutil.revsingle(repo, rev, None)
520 files = None
520 files = None
521
521
522 if opts['compute']:
522 if opts['compute']:
523 files = metadata.compute_all_files_changes(ctx)
523 files = metadata.compute_all_files_changes(ctx)
524 else:
524 else:
525 sd = repo.changelog.sidedata(ctx.rev())
525 sd = repo.changelog.sidedata(ctx.rev())
526 files_block = sd.get(sidedata.SD_FILES)
526 files_block = sd.get(sidedata.SD_FILES)
527 if files_block is not None:
527 if files_block is not None:
528 files = metadata.decode_files_sidedata(sd)
528 files = metadata.decode_files_sidedata(sd)
529 if files is not None:
529 if files is not None:
530 for f in sorted(files.touched):
530 for f in sorted(files.touched):
531 if f in files.added:
531 if f in files.added:
532 action = b"added"
532 action = b"added"
533 elif f in files.removed:
533 elif f in files.removed:
534 action = b"removed"
534 action = b"removed"
535 elif f in files.merged:
535 elif f in files.merged:
536 action = b"merged"
536 action = b"merged"
537 elif f in files.salvaged:
537 elif f in files.salvaged:
538 action = b"salvaged"
538 action = b"salvaged"
539 else:
539 else:
540 action = b"touched"
540 action = b"touched"
541
541
542 copy_parent = b""
542 copy_parent = b""
543 copy_source = b""
543 copy_source = b""
544 if f in files.copied_from_p1:
544 if f in files.copied_from_p1:
545 copy_parent = b"p1"
545 copy_parent = b"p1"
546 copy_source = files.copied_from_p1[f]
546 copy_source = files.copied_from_p1[f]
547 elif f in files.copied_from_p2:
547 elif f in files.copied_from_p2:
548 copy_parent = b"p2"
548 copy_parent = b"p2"
549 copy_source = files.copied_from_p2[f]
549 copy_source = files.copied_from_p2[f]
550
550
551 data = (action, copy_parent, f, copy_source)
551 data = (action, copy_parent, f, copy_source)
552 template = b"%-8s %2s: %s, %s;\n"
552 template = b"%-8s %2s: %s, %s;\n"
553 ui.write(template % data)
553 ui.write(template % data)
554
554
555
555
556 @command(b'debugcheckstate', [], b'')
556 @command(b'debugcheckstate', [], b'')
557 def debugcheckstate(ui, repo):
557 def debugcheckstate(ui, repo):
558 """validate the correctness of the current dirstate"""
558 """validate the correctness of the current dirstate"""
559 parent1, parent2 = repo.dirstate.parents()
559 parent1, parent2 = repo.dirstate.parents()
560 m1 = repo[parent1].manifest()
560 m1 = repo[parent1].manifest()
561 m2 = repo[parent2].manifest()
561 m2 = repo[parent2].manifest()
562 errors = 0
562 errors = 0
563 for err in repo.dirstate.verify(m1, m2):
563 for err in repo.dirstate.verify(m1, m2):
564 ui.warn(err[0] % err[1:])
564 ui.warn(err[0] % err[1:])
565 errors += 1
565 errors += 1
566 if errors:
566 if errors:
567 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
567 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
568 raise error.Abort(errstr)
568 raise error.Abort(errstr)
569
569
570
570
571 @command(
571 @command(
572 b'debugcolor',
572 b'debugcolor',
573 [(b'', b'style', None, _(b'show all configured styles'))],
573 [(b'', b'style', None, _(b'show all configured styles'))],
574 b'hg debugcolor',
574 b'hg debugcolor',
575 )
575 )
576 def debugcolor(ui, repo, **opts):
576 def debugcolor(ui, repo, **opts):
577 """show available color, effects or style"""
577 """show available color, effects or style"""
578 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
578 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
579 if opts.get('style'):
579 if opts.get('style'):
580 return _debugdisplaystyle(ui)
580 return _debugdisplaystyle(ui)
581 else:
581 else:
582 return _debugdisplaycolor(ui)
582 return _debugdisplaycolor(ui)
583
583
584
584
585 def _debugdisplaycolor(ui):
585 def _debugdisplaycolor(ui):
586 ui = ui.copy()
586 ui = ui.copy()
587 ui._styles.clear()
587 ui._styles.clear()
588 for effect in color._activeeffects(ui).keys():
588 for effect in color._activeeffects(ui).keys():
589 ui._styles[effect] = effect
589 ui._styles[effect] = effect
590 if ui._terminfoparams:
590 if ui._terminfoparams:
591 for k, v in ui.configitems(b'color'):
591 for k, v in ui.configitems(b'color'):
592 if k.startswith(b'color.'):
592 if k.startswith(b'color.'):
593 ui._styles[k] = k[6:]
593 ui._styles[k] = k[6:]
594 elif k.startswith(b'terminfo.'):
594 elif k.startswith(b'terminfo.'):
595 ui._styles[k] = k[9:]
595 ui._styles[k] = k[9:]
596 ui.write(_(b'available colors:\n'))
596 ui.write(_(b'available colors:\n'))
597 # sort label with a '_' after the other to group '_background' entry.
597 # sort label with a '_' after the other to group '_background' entry.
598 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
598 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
599 for colorname, label in items:
599 for colorname, label in items:
600 ui.write(b'%s\n' % colorname, label=label)
600 ui.write(b'%s\n' % colorname, label=label)
601
601
602
602
def _debugdisplaystyle(ui):
    """Print each configured style label followed by its rendered effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad names so the effect lists line up in one column.
    column = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * max(0, column - len(name)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616
616
617
617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
639
639
640
640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A revlog index file was given: walk its DAG directly, labeling
        # only the explicitly requested revision numbers.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yields ('n', (rev, [parents])) node events, plus
            # ('l', (rev, label)) label events for requested revs; this is
            # the event stream dagparser.dagtextlines() consumes.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged rev to its list of tag names for labeling.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Same event stream as above, with ('a', branchname) annotation
            # events emitted whenever the branch changes (with -b).
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
710
710
711
711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision
    # itself, not a file name.
    if any(opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
727
728
728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is an (unixtime, tzoffset) pair.
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747
747
748
748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    # Bind frequently-used revlog accessors to locals for the per-rev loop.
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics from the index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            # Chase empty-delta ancestors down to the first real delta base;
            # the guards also stop on self-reference or out-of-range bases
            # from a corrupted index.
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            # Same empty-delta chase for the second parent.
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # Classify the delta base; order matters (direct parents first,
            # then full snapshot, then empty-delta skips, then snapshots).
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without general delta the base is either the rev itself (full
            # snapshot) or the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # NOTE(review): column spacing below reconstructed to align with the
    # %-format widths used in fm.write() — confirm against upstream.
    fm.plain(
        b'    rev  p1  p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Number chains consecutively by first occurrence of their base.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision in the chain.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Measure what a sparse read of this chain would actually touch.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989
989
990
990
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # One positional argument means "REV" (storage chosen by -c/-m flags);
    # two mean "FILE REV".
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # NOTE(review): the command name passed to openrevlog is
    # b'debugdeltachain', not b'debug-delta-find' — presumably only used
    # for error reporting; confirm before relying on it.
    revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)

    # debug_search=True makes the delta computer narrate each candidate it
    # considers through ui.write.
    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=True,
    )

    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)
    p1 = revlog.node(p1r)
    p2 = revlog.node(p2r)
    # The search is replayed from the full text, as stated in the docstring;
    # no cached delta is supplied.
    btext = [revlog.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None
    flags = revlog.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1048
1048
1049
1049
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 metadata file instead of entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates when given.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort by (mtime, filename) so equal mtimes stay deterministic.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # NOTE(review): 'unset'/'set' literals reconstructed padded to the
        # width of the strftime format below — confirm exact padding.
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set in the recorded mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137
1137
1138
1138
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v2
    """
    if not repo.dirstate._use_dirstate_v2:
        return
    # The SHA-1 (160-bit) hash of the ignore patterns lives in the last
    # 20 bytes of the dirstate-v2 tree metadata.
    hash_len = 20
    metadata = repo.dirstate._map.docket.tree_metadata
    ui.write(binascii.hexlify(metadata[-hash_len:]) + b'\n')
1153
1153
1154
1154
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            # fix help-string typo: "treat local has having" -> "as having"
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual remote peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # simulate the remote side with a filtered view of the local repo
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # likewise, restrict the local side to the requested subset
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                # a debug command should be able to run against an
                # unrelated repository without aborting
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # structured output (e.g. JSON): capture side output into `data`

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # sanity check: common and missing partition the whole repo
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1429
1434
1430
1435
1431 _chunksize = 4 << 10
1436 _chunksize = 4 << 10
1432
1437
1433
1438
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    # without --output, stream the payload straight to the ui
    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # bug fix: the source handle was previously never closed (leak)
        fh.close()
        if output:
            dest.close()
1456
1461
1457
1462
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions in deterministic (name) order
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen build: modules have no __file__, point at the executable
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # in quiet/verbose mode the name stands on its own line; otherwise a
        # compatibility annotation may be appended on the same line
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # tested, but not against this hg version: show latest tested
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1519
1524
1520
1525
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # transformation pipeline applied in order; each named stage can be
    # dumped with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the bare 'parsed' dump (verbose mode) carries no stage header
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        # include everything in the working directory, even ignored/unknown
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print the files (in sorted order) selected by the fileset
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1616
1621
1617
1622
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # Producing a report and consuming one (or doing a dry run) are
    # mutually exclusive modes of operation.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only ever affected revlogv1 stores; refuse anything else.
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1690
1695
1691
1696
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: the widest variant name, but never narrower than the
    # b'format-variant' header itself.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded with spaces so the value columns line up.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # Bytes-like values (anything with `startswith`) are printed
            # verbatim; everything else is rendered as yes/no.
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # Structured output (json/template) keeps raw values.
        formatvalue = pycompat.identity

    # Header row; config/default columns only appear in verbose mode.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so color/templates can highlight where the repo's
        # actual format disagrees with the config or with the default.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        # Non-default config values get their own label too.
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1762
1767
1763
1768
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def _yesno(flag):
        # Render a boolean probe result as the command always has.
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % _yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % _yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % _yesno(util.checknlink(path)))
    # Case sensitivity is probed with a throwaway temp file; any OS error
    # (unwritable path, etc.) leaves the answer unknown rather than failing.
    sensitivity = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            sensitivity = _yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % sensitivity)
1786
1791
1787
1792
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # Map the user-facing compression name onto an internal bundle type.
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1834
1839
1835
1840
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            # `ignored` is the matching path (the file itself or a
            # containing directory); `ignoredata` is the (file, line,
            # pattern) triple of the rule that matched.
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # The file itself did not match; a file is also ignored
                    # when any of its parent directories matches a pattern.
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # Point at the exact rule that caused the match.
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1884
1889
1885
1890
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    storage = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    formatter = ui.formatter(b'debugindex', opts)
    # Some storage objects wrap a revlog in `_revlog`; bare revlogs are
    # used directly.
    target = getattr(storage, b'_revlog', storage)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=formatter,
        revlog=target,
        full_node=ui.debugflag,
    )
1907
1912
1908
1913
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in rlog:
        p1, p2 = rlog.parents(rlog.node(rev))
        # One edge per parent; the null second parent is not drawn.
        ui.write(b"\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write(b"}\n")
1927
1932
1928
1933
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # NOTE(review): this call presumably forces the index to be built
    # before stats are read — confirm against changelog internals.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # Only the native (C/Rust) index implementation exposes stats().
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1938
1943
1939
1944
1940 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1945 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1941 def debuginstall(ui, **opts):
1946 def debuginstall(ui, **opts):
1942 """test Mercurial installation
1947 """test Mercurial installation
1943
1948
1944 Returns 0 on success.
1949 Returns 0 on success.
1945 """
1950 """
1946 opts = pycompat.byteskwargs(opts)
1951 opts = pycompat.byteskwargs(opts)
1947
1952
1948 problems = 0
1953 problems = 0
1949
1954
1950 fm = ui.formatter(b'debuginstall', opts)
1955 fm = ui.formatter(b'debuginstall', opts)
1951 fm.startitem()
1956 fm.startitem()
1952
1957
1953 # encoding might be unknown or wrong. don't translate these messages.
1958 # encoding might be unknown or wrong. don't translate these messages.
1954 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1959 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1955 err = None
1960 err = None
1956 try:
1961 try:
1957 codecs.lookup(pycompat.sysstr(encoding.encoding))
1962 codecs.lookup(pycompat.sysstr(encoding.encoding))
1958 except LookupError as inst:
1963 except LookupError as inst:
1959 err = stringutil.forcebytestr(inst)
1964 err = stringutil.forcebytestr(inst)
1960 problems += 1
1965 problems += 1
1961 fm.condwrite(
1966 fm.condwrite(
1962 err,
1967 err,
1963 b'encodingerror',
1968 b'encodingerror',
1964 b" %s\n (check that your locale is properly set)\n",
1969 b" %s\n (check that your locale is properly set)\n",
1965 err,
1970 err,
1966 )
1971 )
1967
1972
1968 # Python
1973 # Python
1969 pythonlib = None
1974 pythonlib = None
1970 if util.safehasattr(os, '__file__'):
1975 if util.safehasattr(os, '__file__'):
1971 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1976 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1972 elif getattr(sys, 'oxidized', False):
1977 elif getattr(sys, 'oxidized', False):
1973 pythonlib = pycompat.sysexecutable
1978 pythonlib = pycompat.sysexecutable
1974
1979
1975 fm.write(
1980 fm.write(
1976 b'pythonexe',
1981 b'pythonexe',
1977 _(b"checking Python executable (%s)\n"),
1982 _(b"checking Python executable (%s)\n"),
1978 pycompat.sysexecutable or _(b"unknown"),
1983 pycompat.sysexecutable or _(b"unknown"),
1979 )
1984 )
1980 fm.write(
1985 fm.write(
1981 b'pythonimplementation',
1986 b'pythonimplementation',
1982 _(b"checking Python implementation (%s)\n"),
1987 _(b"checking Python implementation (%s)\n"),
1983 pycompat.sysbytes(platform.python_implementation()),
1988 pycompat.sysbytes(platform.python_implementation()),
1984 )
1989 )
1985 fm.write(
1990 fm.write(
1986 b'pythonver',
1991 b'pythonver',
1987 _(b"checking Python version (%s)\n"),
1992 _(b"checking Python version (%s)\n"),
1988 (b"%d.%d.%d" % sys.version_info[:3]),
1993 (b"%d.%d.%d" % sys.version_info[:3]),
1989 )
1994 )
1990 fm.write(
1995 fm.write(
1991 b'pythonlib',
1996 b'pythonlib',
1992 _(b"checking Python lib (%s)...\n"),
1997 _(b"checking Python lib (%s)...\n"),
1993 pythonlib or _(b"unknown"),
1998 pythonlib or _(b"unknown"),
1994 )
1999 )
1995
2000
1996 try:
2001 try:
1997 from . import rustext # pytype: disable=import-error
2002 from . import rustext # pytype: disable=import-error
1998
2003
1999 rustext.__doc__ # trigger lazy import
2004 rustext.__doc__ # trigger lazy import
2000 except ImportError:
2005 except ImportError:
2001 rustext = None
2006 rustext = None
2002
2007
2003 security = set(sslutil.supportedprotocols)
2008 security = set(sslutil.supportedprotocols)
2004 if sslutil.hassni:
2009 if sslutil.hassni:
2005 security.add(b'sni')
2010 security.add(b'sni')
2006
2011
2007 fm.write(
2012 fm.write(
2008 b'pythonsecurity',
2013 b'pythonsecurity',
2009 _(b"checking Python security support (%s)\n"),
2014 _(b"checking Python security support (%s)\n"),
2010 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2015 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2011 )
2016 )
2012
2017
2013 # These are warnings, not errors. So don't increment problem count. This
2018 # These are warnings, not errors. So don't increment problem count. This
2014 # may change in the future.
2019 # may change in the future.
2015 if b'tls1.2' not in security:
2020 if b'tls1.2' not in security:
2016 fm.plain(
2021 fm.plain(
2017 _(
2022 _(
2018 b' TLS 1.2 not supported by Python install; '
2023 b' TLS 1.2 not supported by Python install; '
2019 b'network connections lack modern security\n'
2024 b'network connections lack modern security\n'
2020 )
2025 )
2021 )
2026 )
2022 if b'sni' not in security:
2027 if b'sni' not in security:
2023 fm.plain(
2028 fm.plain(
2024 _(
2029 _(
2025 b' SNI not supported by Python install; may have '
2030 b' SNI not supported by Python install; may have '
2026 b'connectivity issues with some servers\n'
2031 b'connectivity issues with some servers\n'
2027 )
2032 )
2028 )
2033 )
2029
2034
2030 fm.plain(
2035 fm.plain(
2031 _(
2036 _(
2032 b"checking Rust extensions (%s)\n"
2037 b"checking Rust extensions (%s)\n"
2033 % (b'missing' if rustext is None else b'installed')
2038 % (b'missing' if rustext is None else b'installed')
2034 ),
2039 ),
2035 )
2040 )
2036
2041
2037 # TODO print CA cert info
2042 # TODO print CA cert info
2038
2043
2039 # hg version
2044 # hg version
2040 hgver = util.version()
2045 hgver = util.version()
2041 fm.write(
2046 fm.write(
2042 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2047 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2043 )
2048 )
2044 fm.write(
2049 fm.write(
2045 b'hgverextra',
2050 b'hgverextra',
2046 _(b"checking Mercurial custom build (%s)\n"),
2051 _(b"checking Mercurial custom build (%s)\n"),
2047 b'+'.join(hgver.split(b'+')[1:]),
2052 b'+'.join(hgver.split(b'+')[1:]),
2048 )
2053 )
2049
2054
2050 # compiled modules
2055 # compiled modules
2051 hgmodules = None
2056 hgmodules = None
2052 if util.safehasattr(sys.modules[__name__], '__file__'):
2057 if util.safehasattr(sys.modules[__name__], '__file__'):
2053 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2058 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2054 elif getattr(sys, 'oxidized', False):
2059 elif getattr(sys, 'oxidized', False):
2055 hgmodules = pycompat.sysexecutable
2060 hgmodules = pycompat.sysexecutable
2056
2061
2057 fm.write(
2062 fm.write(
2058 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2063 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2059 )
2064 )
2060 fm.write(
2065 fm.write(
2061 b'hgmodules',
2066 b'hgmodules',
2062 _(b"checking installed modules (%s)...\n"),
2067 _(b"checking installed modules (%s)...\n"),
2063 hgmodules or _(b"unknown"),
2068 hgmodules or _(b"unknown"),
2064 )
2069 )
2065
2070
2066 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2071 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2067 rustext = rustandc # for now, that's the only case
2072 rustext = rustandc # for now, that's the only case
2068 cext = policy.policy in (b'c', b'allow') or rustandc
2073 cext = policy.policy in (b'c', b'allow') or rustandc
2069 nopure = cext or rustext
2074 nopure = cext or rustext
2070 if nopure:
2075 if nopure:
2071 err = None
2076 err = None
2072 try:
2077 try:
2073 if cext:
2078 if cext:
2074 from .cext import ( # pytype: disable=import-error
2079 from .cext import ( # pytype: disable=import-error
2075 base85,
2080 base85,
2076 bdiff,
2081 bdiff,
2077 mpatch,
2082 mpatch,
2078 osutil,
2083 osutil,
2079 )
2084 )
2080
2085
2081 # quiet pyflakes
2086 # quiet pyflakes
2082 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2087 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2083 if rustext:
2088 if rustext:
2084 from .rustext import ( # pytype: disable=import-error
2089 from .rustext import ( # pytype: disable=import-error
2085 ancestor,
2090 ancestor,
2086 dirstate,
2091 dirstate,
2087 )
2092 )
2088
2093
2089 dir(ancestor), dir(dirstate) # quiet pyflakes
2094 dir(ancestor), dir(dirstate) # quiet pyflakes
2090 except Exception as inst:
2095 except Exception as inst:
2091 err = stringutil.forcebytestr(inst)
2096 err = stringutil.forcebytestr(inst)
2092 problems += 1
2097 problems += 1
2093 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2098 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2094
2099
2095 compengines = util.compengines._engines.values()
2100 compengines = util.compengines._engines.values()
2096 fm.write(
2101 fm.write(
2097 b'compengines',
2102 b'compengines',
2098 _(b'checking registered compression engines (%s)\n'),
2103 _(b'checking registered compression engines (%s)\n'),
2099 fm.formatlist(
2104 fm.formatlist(
2100 sorted(e.name() for e in compengines),
2105 sorted(e.name() for e in compengines),
2101 name=b'compengine',
2106 name=b'compengine',
2102 fmt=b'%s',
2107 fmt=b'%s',
2103 sep=b', ',
2108 sep=b', ',
2104 ),
2109 ),
2105 )
2110 )
2106 fm.write(
2111 fm.write(
2107 b'compenginesavail',
2112 b'compenginesavail',
2108 _(b'checking available compression engines (%s)\n'),
2113 _(b'checking available compression engines (%s)\n'),
2109 fm.formatlist(
2114 fm.formatlist(
2110 sorted(e.name() for e in compengines if e.available()),
2115 sorted(e.name() for e in compengines if e.available()),
2111 name=b'compengine',
2116 name=b'compengine',
2112 fmt=b'%s',
2117 fmt=b'%s',
2113 sep=b', ',
2118 sep=b', ',
2114 ),
2119 ),
2115 )
2120 )
2116 wirecompengines = compression.compengines.supportedwireengines(
2121 wirecompengines = compression.compengines.supportedwireengines(
2117 compression.SERVERROLE
2122 compression.SERVERROLE
2118 )
2123 )
2119 fm.write(
2124 fm.write(
2120 b'compenginesserver',
2125 b'compenginesserver',
2121 _(
2126 _(
2122 b'checking available compression engines '
2127 b'checking available compression engines '
2123 b'for wire protocol (%s)\n'
2128 b'for wire protocol (%s)\n'
2124 ),
2129 ),
2125 fm.formatlist(
2130 fm.formatlist(
2126 [e.name() for e in wirecompengines if e.wireprotosupport()],
2131 [e.name() for e in wirecompengines if e.wireprotosupport()],
2127 name=b'compengine',
2132 name=b'compengine',
2128 fmt=b'%s',
2133 fmt=b'%s',
2129 sep=b', ',
2134 sep=b', ',
2130 ),
2135 ),
2131 )
2136 )
2132 re2 = b'missing'
2137 re2 = b'missing'
2133 if util._re2:
2138 if util._re2:
2134 re2 = b'available'
2139 re2 = b'available'
2135 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2140 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2136 fm.data(re2=bool(util._re2))
2141 fm.data(re2=bool(util._re2))
2137
2142
2138 # templates
2143 # templates
2139 p = templater.templatedir()
2144 p = templater.templatedir()
2140 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2145 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2141 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2146 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2142 if p:
2147 if p:
2143 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2148 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2144 if m:
2149 if m:
2145 # template found, check if it is working
2150 # template found, check if it is working
2146 err = None
2151 err = None
2147 try:
2152 try:
2148 templater.templater.frommapfile(m)
2153 templater.templater.frommapfile(m)
2149 except Exception as inst:
2154 except Exception as inst:
2150 err = stringutil.forcebytestr(inst)
2155 err = stringutil.forcebytestr(inst)
2151 p = None
2156 p = None
2152 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2157 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2153 else:
2158 else:
2154 p = None
2159 p = None
2155 fm.condwrite(
2160 fm.condwrite(
2156 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2161 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2157 )
2162 )
2158 fm.condwrite(
2163 fm.condwrite(
2159 not m,
2164 not m,
2160 b'defaulttemplatenotfound',
2165 b'defaulttemplatenotfound',
2161 _(b" template '%s' not found\n"),
2166 _(b" template '%s' not found\n"),
2162 b"default",
2167 b"default",
2163 )
2168 )
2164 if not p:
2169 if not p:
2165 problems += 1
2170 problems += 1
2166 fm.condwrite(
2171 fm.condwrite(
2167 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2172 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2168 )
2173 )
2169
2174
2170 # editor
2175 # editor
2171 editor = ui.geteditor()
2176 editor = ui.geteditor()
2172 editor = util.expandpath(editor)
2177 editor = util.expandpath(editor)
2173 editorbin = procutil.shellsplit(editor)[0]
2178 editorbin = procutil.shellsplit(editor)[0]
2174 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2179 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2175 cmdpath = procutil.findexe(editorbin)
2180 cmdpath = procutil.findexe(editorbin)
2176 fm.condwrite(
2181 fm.condwrite(
2177 not cmdpath and editor == b'vi',
2182 not cmdpath and editor == b'vi',
2178 b'vinotfound',
2183 b'vinotfound',
2179 _(
2184 _(
2180 b" No commit editor set and can't find %s in PATH\n"
2185 b" No commit editor set and can't find %s in PATH\n"
2181 b" (specify a commit editor in your configuration"
2186 b" (specify a commit editor in your configuration"
2182 b" file)\n"
2187 b" file)\n"
2183 ),
2188 ),
2184 not cmdpath and editor == b'vi' and editorbin,
2189 not cmdpath and editor == b'vi' and editorbin,
2185 )
2190 )
2186 fm.condwrite(
2191 fm.condwrite(
2187 not cmdpath and editor != b'vi',
2192 not cmdpath and editor != b'vi',
2188 b'editornotfound',
2193 b'editornotfound',
2189 _(
2194 _(
2190 b" Can't find editor '%s' in PATH\n"
2195 b" Can't find editor '%s' in PATH\n"
2191 b" (specify a commit editor in your configuration"
2196 b" (specify a commit editor in your configuration"
2192 b" file)\n"
2197 b" file)\n"
2193 ),
2198 ),
2194 not cmdpath and editorbin,
2199 not cmdpath and editorbin,
2195 )
2200 )
2196 if not cmdpath and editor != b'vi':
2201 if not cmdpath and editor != b'vi':
2197 problems += 1
2202 problems += 1
2198
2203
2199 # check username
2204 # check username
2200 username = None
2205 username = None
2201 err = None
2206 err = None
2202 try:
2207 try:
2203 username = ui.username()
2208 username = ui.username()
2204 except error.Abort as e:
2209 except error.Abort as e:
2205 err = e.message
2210 err = e.message
2206 problems += 1
2211 problems += 1
2207
2212
2208 fm.condwrite(
2213 fm.condwrite(
2209 username, b'username', _(b"checking username (%s)\n"), username
2214 username, b'username', _(b"checking username (%s)\n"), username
2210 )
2215 )
2211 fm.condwrite(
2216 fm.condwrite(
2212 err,
2217 err,
2213 b'usernameerror',
2218 b'usernameerror',
2214 _(
2219 _(
2215 b"checking username...\n %s\n"
2220 b"checking username...\n %s\n"
2216 b" (specify a username in your configuration file)\n"
2221 b" (specify a username in your configuration file)\n"
2217 ),
2222 ),
2218 err,
2223 err,
2219 )
2224 )
2220
2225
2221 for name, mod in extensions.extensions():
2226 for name, mod in extensions.extensions():
2222 handler = getattr(mod, 'debuginstall', None)
2227 handler = getattr(mod, 'debuginstall', None)
2223 if handler is not None:
2228 if handler is not None:
2224 problems += handler(ui, fm)
2229 problems += handler(ui, fm)
2225
2230
2226 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2231 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2227 if not problems:
2232 if not problems:
2228 fm.data(problems=problems)
2233 fm.data(problems=problems)
2229 fm.condwrite(
2234 fm.condwrite(
2230 problems,
2235 problems,
2231 b'problems',
2236 b'problems',
2232 _(b"%d problems detected, please check your install!\n"),
2237 _(b"%d problems detected, please check your install!\n"),
2233 problems,
2238 problems,
2234 )
2239 )
2235 fm.end()
2240 fm.end()
2236
2241
2237 return problems
2242 return problems
2238
2243
2239
2244
2240 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2245 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2241 def debugknown(ui, repopath, *ids, **opts):
2246 def debugknown(ui, repopath, *ids, **opts):
2242 """test whether node ids are known to a repo
2247 """test whether node ids are known to a repo
2243
2248
2244 Every ID must be a full-length hex node id string. Returns a list of 0s
2249 Every ID must be a full-length hex node id string. Returns a list of 0s
2245 and 1s indicating unknown/known.
2250 and 1s indicating unknown/known.
2246 """
2251 """
2247 opts = pycompat.byteskwargs(opts)
2252 opts = pycompat.byteskwargs(opts)
2248 repo = hg.peer(ui, opts, repopath)
2253 repo = hg.peer(ui, opts, repopath)
2249 if not repo.capable(b'known'):
2254 if not repo.capable(b'known'):
2250 raise error.Abort(b"known() not supported by target repository")
2255 raise error.Abort(b"known() not supported by target repository")
2251 flags = repo.known([bin(s) for s in ids])
2256 flags = repo.known([bin(s) for s in ids])
2252 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2257 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2253
2258
2254
2259
2255 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2260 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2256 def debuglabelcomplete(ui, repo, *args):
2261 def debuglabelcomplete(ui, repo, *args):
2257 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2262 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2258 debugnamecomplete(ui, repo, *args)
2263 debugnamecomplete(ui, repo, *args)
2259
2264
2260
2265
2261 @command(
2266 @command(
2262 b'debuglocks',
2267 b'debuglocks',
2263 [
2268 [
2264 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2269 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2265 (
2270 (
2266 b'W',
2271 b'W',
2267 b'force-free-wlock',
2272 b'force-free-wlock',
2268 None,
2273 None,
2269 _(b'free the working state lock (DANGEROUS)'),
2274 _(b'free the working state lock (DANGEROUS)'),
2270 ),
2275 ),
2271 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2276 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2272 (
2277 (
2273 b'S',
2278 b'S',
2274 b'set-wlock',
2279 b'set-wlock',
2275 None,
2280 None,
2276 _(b'set the working state lock until stopped'),
2281 _(b'set the working state lock until stopped'),
2277 ),
2282 ),
2278 ],
2283 ],
2279 _(b'[OPTION]...'),
2284 _(b'[OPTION]...'),
2280 )
2285 )
2281 def debuglocks(ui, repo, **opts):
2286 def debuglocks(ui, repo, **opts):
2282 """show or modify state of locks
2287 """show or modify state of locks
2283
2288
2284 By default, this command will show which locks are held. This
2289 By default, this command will show which locks are held. This
2285 includes the user and process holding the lock, the amount of time
2290 includes the user and process holding the lock, the amount of time
2286 the lock has been held, and the machine name where the process is
2291 the lock has been held, and the machine name where the process is
2287 running if it's not local.
2292 running if it's not local.
2288
2293
2289 Locks protect the integrity of Mercurial's data, so should be
2294 Locks protect the integrity of Mercurial's data, so should be
2290 treated with care. System crashes or other interruptions may cause
2295 treated with care. System crashes or other interruptions may cause
2291 locks to not be properly released, though Mercurial will usually
2296 locks to not be properly released, though Mercurial will usually
2292 detect and remove such stale locks automatically.
2297 detect and remove such stale locks automatically.
2293
2298
2294 However, detecting stale locks may not always be possible (for
2299 However, detecting stale locks may not always be possible (for
2295 instance, on a shared filesystem). Removing locks may also be
2300 instance, on a shared filesystem). Removing locks may also be
2296 blocked by filesystem permissions.
2301 blocked by filesystem permissions.
2297
2302
2298 Setting a lock will prevent other commands from changing the data.
2303 Setting a lock will prevent other commands from changing the data.
2299 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2304 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2300 The set locks are removed when the command exits.
2305 The set locks are removed when the command exits.
2301
2306
2302 Returns 0 if no locks are held.
2307 Returns 0 if no locks are held.
2303
2308
2304 """
2309 """
2305
2310
2306 if opts.get('force_free_lock'):
2311 if opts.get('force_free_lock'):
2307 repo.svfs.tryunlink(b'lock')
2312 repo.svfs.tryunlink(b'lock')
2308 if opts.get('force_free_wlock'):
2313 if opts.get('force_free_wlock'):
2309 repo.vfs.tryunlink(b'wlock')
2314 repo.vfs.tryunlink(b'wlock')
2310 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2315 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2311 return 0
2316 return 0
2312
2317
2313 locks = []
2318 locks = []
2314 try:
2319 try:
2315 if opts.get('set_wlock'):
2320 if opts.get('set_wlock'):
2316 try:
2321 try:
2317 locks.append(repo.wlock(False))
2322 locks.append(repo.wlock(False))
2318 except error.LockHeld:
2323 except error.LockHeld:
2319 raise error.Abort(_(b'wlock is already held'))
2324 raise error.Abort(_(b'wlock is already held'))
2320 if opts.get('set_lock'):
2325 if opts.get('set_lock'):
2321 try:
2326 try:
2322 locks.append(repo.lock(False))
2327 locks.append(repo.lock(False))
2323 except error.LockHeld:
2328 except error.LockHeld:
2324 raise error.Abort(_(b'lock is already held'))
2329 raise error.Abort(_(b'lock is already held'))
2325 if len(locks):
2330 if len(locks):
2326 try:
2331 try:
2327 if ui.interactive():
2332 if ui.interactive():
2328 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2333 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2329 ui.promptchoice(prompt)
2334 ui.promptchoice(prompt)
2330 else:
2335 else:
2331 msg = b"%d locks held, waiting for signal\n"
2336 msg = b"%d locks held, waiting for signal\n"
2332 msg %= len(locks)
2337 msg %= len(locks)
2333 ui.status(msg)
2338 ui.status(msg)
2334 while True: # XXX wait for a signal
2339 while True: # XXX wait for a signal
2335 time.sleep(0.1)
2340 time.sleep(0.1)
2336 except KeyboardInterrupt:
2341 except KeyboardInterrupt:
2337 msg = b"signal-received releasing locks\n"
2342 msg = b"signal-received releasing locks\n"
2338 ui.status(msg)
2343 ui.status(msg)
2339 return 0
2344 return 0
2340 finally:
2345 finally:
2341 release(*locks)
2346 release(*locks)
2342
2347
2343 now = time.time()
2348 now = time.time()
2344 held = 0
2349 held = 0
2345
2350
2346 def report(vfs, name, method):
2351 def report(vfs, name, method):
2347 # this causes stale locks to get reaped for more accurate reporting
2352 # this causes stale locks to get reaped for more accurate reporting
2348 try:
2353 try:
2349 l = method(False)
2354 l = method(False)
2350 except error.LockHeld:
2355 except error.LockHeld:
2351 l = None
2356 l = None
2352
2357
2353 if l:
2358 if l:
2354 l.release()
2359 l.release()
2355 else:
2360 else:
2356 try:
2361 try:
2357 st = vfs.lstat(name)
2362 st = vfs.lstat(name)
2358 age = now - st[stat.ST_MTIME]
2363 age = now - st[stat.ST_MTIME]
2359 user = util.username(st.st_uid)
2364 user = util.username(st.st_uid)
2360 locker = vfs.readlock(name)
2365 locker = vfs.readlock(name)
2361 if b":" in locker:
2366 if b":" in locker:
2362 host, pid = locker.split(b':')
2367 host, pid = locker.split(b':')
2363 if host == socket.gethostname():
2368 if host == socket.gethostname():
2364 locker = b'user %s, process %s' % (user or b'None', pid)
2369 locker = b'user %s, process %s' % (user or b'None', pid)
2365 else:
2370 else:
2366 locker = b'user %s, process %s, host %s' % (
2371 locker = b'user %s, process %s, host %s' % (
2367 user or b'None',
2372 user or b'None',
2368 pid,
2373 pid,
2369 host,
2374 host,
2370 )
2375 )
2371 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2376 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2372 return 1
2377 return 1
2373 except FileNotFoundError:
2378 except FileNotFoundError:
2374 pass
2379 pass
2375
2380
2376 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2381 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2377 return 0
2382 return 0
2378
2383
2379 held += report(repo.svfs, b"lock", repo.lock)
2384 held += report(repo.svfs, b"lock", repo.lock)
2380 held += report(repo.vfs, b"wlock", repo.wlock)
2385 held += report(repo.vfs, b"wlock", repo.wlock)
2381
2386
2382 return held
2387 return held
2383
2388
2384
2389
2385 @command(
2390 @command(
2386 b'debugmanifestfulltextcache',
2391 b'debugmanifestfulltextcache',
2387 [
2392 [
2388 (b'', b'clear', False, _(b'clear the cache')),
2393 (b'', b'clear', False, _(b'clear the cache')),
2389 (
2394 (
2390 b'a',
2395 b'a',
2391 b'add',
2396 b'add',
2392 [],
2397 [],
2393 _(b'add the given manifest nodes to the cache'),
2398 _(b'add the given manifest nodes to the cache'),
2394 _(b'NODE'),
2399 _(b'NODE'),
2395 ),
2400 ),
2396 ],
2401 ],
2397 b'',
2402 b'',
2398 )
2403 )
2399 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2404 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2400 """show, clear or amend the contents of the manifest fulltext cache"""
2405 """show, clear or amend the contents of the manifest fulltext cache"""
2401
2406
2402 def getcache():
2407 def getcache():
2403 r = repo.manifestlog.getstorage(b'')
2408 r = repo.manifestlog.getstorage(b'')
2404 try:
2409 try:
2405 return r._fulltextcache
2410 return r._fulltextcache
2406 except AttributeError:
2411 except AttributeError:
2407 msg = _(
2412 msg = _(
2408 b"Current revlog implementation doesn't appear to have a "
2413 b"Current revlog implementation doesn't appear to have a "
2409 b"manifest fulltext cache\n"
2414 b"manifest fulltext cache\n"
2410 )
2415 )
2411 raise error.Abort(msg)
2416 raise error.Abort(msg)
2412
2417
2413 if opts.get('clear'):
2418 if opts.get('clear'):
2414 with repo.wlock():
2419 with repo.wlock():
2415 cache = getcache()
2420 cache = getcache()
2416 cache.clear(clear_persisted_data=True)
2421 cache.clear(clear_persisted_data=True)
2417 return
2422 return
2418
2423
2419 if add:
2424 if add:
2420 with repo.wlock():
2425 with repo.wlock():
2421 m = repo.manifestlog
2426 m = repo.manifestlog
2422 store = m.getstorage(b'')
2427 store = m.getstorage(b'')
2423 for n in add:
2428 for n in add:
2424 try:
2429 try:
2425 manifest = m[store.lookup(n)]
2430 manifest = m[store.lookup(n)]
2426 except error.LookupError as e:
2431 except error.LookupError as e:
2427 raise error.Abort(
2432 raise error.Abort(
2428 bytes(e), hint=b"Check your manifest node id"
2433 bytes(e), hint=b"Check your manifest node id"
2429 )
2434 )
2430 manifest.read() # stores revisision in cache too
2435 manifest.read() # stores revisision in cache too
2431 return
2436 return
2432
2437
2433 cache = getcache()
2438 cache = getcache()
2434 if not len(cache):
2439 if not len(cache):
2435 ui.write(_(b'cache empty\n'))
2440 ui.write(_(b'cache empty\n'))
2436 else:
2441 else:
2437 ui.write(
2442 ui.write(
2438 _(
2443 _(
2439 b'cache contains %d manifest entries, in order of most to '
2444 b'cache contains %d manifest entries, in order of most to '
2440 b'least recent:\n'
2445 b'least recent:\n'
2441 )
2446 )
2442 % (len(cache),)
2447 % (len(cache),)
2443 )
2448 )
2444 totalsize = 0
2449 totalsize = 0
2445 for nodeid in cache:
2450 for nodeid in cache:
2446 # Use cache.get to not update the LRU order
2451 # Use cache.get to not update the LRU order
2447 data = cache.peek(nodeid)
2452 data = cache.peek(nodeid)
2448 size = len(data)
2453 size = len(data)
2449 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2454 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2450 ui.write(
2455 ui.write(
2451 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2456 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2452 )
2457 )
2453 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2458 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2454 ui.write(
2459 ui.write(
2455 _(b'total cache data size %s, on-disk %s\n')
2460 _(b'total cache data size %s, on-disk %s\n')
2456 % (util.bytecount(totalsize), util.bytecount(ondisk))
2461 % (util.bytecount(totalsize), util.bytecount(ondisk))
2457 )
2462 )
2458
2463
2459
2464
2460 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2465 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2461 def debugmergestate(ui, repo, *args, **opts):
2466 def debugmergestate(ui, repo, *args, **opts):
2462 """print merge state
2467 """print merge state
2463
2468
2464 Use --verbose to print out information about whether v1 or v2 merge state
2469 Use --verbose to print out information about whether v1 or v2 merge state
2465 was chosen."""
2470 was chosen."""
2466
2471
2467 if ui.verbose:
2472 if ui.verbose:
2468 ms = mergestatemod.mergestate(repo)
2473 ms = mergestatemod.mergestate(repo)
2469
2474
2470 # sort so that reasonable information is on top
2475 # sort so that reasonable information is on top
2471 v1records = ms._readrecordsv1()
2476 v1records = ms._readrecordsv1()
2472 v2records = ms._readrecordsv2()
2477 v2records = ms._readrecordsv2()
2473
2478
2474 if not v1records and not v2records:
2479 if not v1records and not v2records:
2475 pass
2480 pass
2476 elif not v2records:
2481 elif not v2records:
2477 ui.writenoi18n(b'no version 2 merge state\n')
2482 ui.writenoi18n(b'no version 2 merge state\n')
2478 elif ms._v1v2match(v1records, v2records):
2483 elif ms._v1v2match(v1records, v2records):
2479 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2484 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2480 else:
2485 else:
2481 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2486 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2482
2487
2483 opts = pycompat.byteskwargs(opts)
2488 opts = pycompat.byteskwargs(opts)
2484 if not opts[b'template']:
2489 if not opts[b'template']:
2485 opts[b'template'] = (
2490 opts[b'template'] = (
2486 b'{if(commits, "", "no merge state found\n")}'
2491 b'{if(commits, "", "no merge state found\n")}'
2487 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2492 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2488 b'{files % "file: {path} (state \\"{state}\\")\n'
2493 b'{files % "file: {path} (state \\"{state}\\")\n'
2489 b'{if(local_path, "'
2494 b'{if(local_path, "'
2490 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2495 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2491 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2496 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2492 b' other path: {other_path} (node {other_node})\n'
2497 b' other path: {other_path} (node {other_node})\n'
2493 b'")}'
2498 b'")}'
2494 b'{if(rename_side, "'
2499 b'{if(rename_side, "'
2495 b' rename side: {rename_side}\n'
2500 b' rename side: {rename_side}\n'
2496 b' renamed path: {renamed_path}\n'
2501 b' renamed path: {renamed_path}\n'
2497 b'")}'
2502 b'")}'
2498 b'{extras % " extra: {key} = {value}\n"}'
2503 b'{extras % " extra: {key} = {value}\n"}'
2499 b'"}'
2504 b'"}'
2500 b'{extras % "extra: {file} ({key} = {value})\n"}'
2505 b'{extras % "extra: {file} ({key} = {value})\n"}'
2501 )
2506 )
2502
2507
2503 ms = mergestatemod.mergestate.read(repo)
2508 ms = mergestatemod.mergestate.read(repo)
2504
2509
2505 fm = ui.formatter(b'debugmergestate', opts)
2510 fm = ui.formatter(b'debugmergestate', opts)
2506 fm.startitem()
2511 fm.startitem()
2507
2512
2508 fm_commits = fm.nested(b'commits')
2513 fm_commits = fm.nested(b'commits')
2509 if ms.active():
2514 if ms.active():
2510 for name, node, label_index in (
2515 for name, node, label_index in (
2511 (b'local', ms.local, 0),
2516 (b'local', ms.local, 0),
2512 (b'other', ms.other, 1),
2517 (b'other', ms.other, 1),
2513 ):
2518 ):
2514 fm_commits.startitem()
2519 fm_commits.startitem()
2515 fm_commits.data(name=name)
2520 fm_commits.data(name=name)
2516 fm_commits.data(node=hex(node))
2521 fm_commits.data(node=hex(node))
2517 if ms._labels and len(ms._labels) > label_index:
2522 if ms._labels and len(ms._labels) > label_index:
2518 fm_commits.data(label=ms._labels[label_index])
2523 fm_commits.data(label=ms._labels[label_index])
2519 fm_commits.end()
2524 fm_commits.end()
2520
2525
2521 fm_files = fm.nested(b'files')
2526 fm_files = fm.nested(b'files')
2522 if ms.active():
2527 if ms.active():
2523 for f in ms:
2528 for f in ms:
2524 fm_files.startitem()
2529 fm_files.startitem()
2525 fm_files.data(path=f)
2530 fm_files.data(path=f)
2526 state = ms._state[f]
2531 state = ms._state[f]
2527 fm_files.data(state=state[0])
2532 fm_files.data(state=state[0])
2528 if state[0] in (
2533 if state[0] in (
2529 mergestatemod.MERGE_RECORD_UNRESOLVED,
2534 mergestatemod.MERGE_RECORD_UNRESOLVED,
2530 mergestatemod.MERGE_RECORD_RESOLVED,
2535 mergestatemod.MERGE_RECORD_RESOLVED,
2531 ):
2536 ):
2532 fm_files.data(local_key=state[1])
2537 fm_files.data(local_key=state[1])
2533 fm_files.data(local_path=state[2])
2538 fm_files.data(local_path=state[2])
2534 fm_files.data(ancestor_path=state[3])
2539 fm_files.data(ancestor_path=state[3])
2535 fm_files.data(ancestor_node=state[4])
2540 fm_files.data(ancestor_node=state[4])
2536 fm_files.data(other_path=state[5])
2541 fm_files.data(other_path=state[5])
2537 fm_files.data(other_node=state[6])
2542 fm_files.data(other_node=state[6])
2538 fm_files.data(local_flags=state[7])
2543 fm_files.data(local_flags=state[7])
2539 elif state[0] in (
2544 elif state[0] in (
2540 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2545 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2541 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2546 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2542 ):
2547 ):
2543 fm_files.data(renamed_path=state[1])
2548 fm_files.data(renamed_path=state[1])
2544 fm_files.data(rename_side=state[2])
2549 fm_files.data(rename_side=state[2])
2545 fm_extras = fm_files.nested(b'extras')
2550 fm_extras = fm_files.nested(b'extras')
2546 for k, v in sorted(ms.extras(f).items()):
2551 for k, v in sorted(ms.extras(f).items()):
2547 fm_extras.startitem()
2552 fm_extras.startitem()
2548 fm_extras.data(key=k)
2553 fm_extras.data(key=k)
2549 fm_extras.data(value=v)
2554 fm_extras.data(value=v)
2550 fm_extras.end()
2555 fm_extras.end()
2551
2556
2552 fm_files.end()
2557 fm_files.end()
2553
2558
2554 fm_extras = fm.nested(b'extras')
2559 fm_extras = fm.nested(b'extras')
2555 for f, d in sorted(ms.allextras().items()):
2560 for f, d in sorted(ms.allextras().items()):
2556 if f in ms:
2561 if f in ms:
2557 # If file is in mergestate, we have already processed it's extras
2562 # If file is in mergestate, we have already processed it's extras
2558 continue
2563 continue
2559 for k, v in d.items():
2564 for k, v in d.items():
2560 fm_extras.startitem()
2565 fm_extras.startitem()
2561 fm_extras.data(file=f)
2566 fm_extras.data(file=f)
2562 fm_extras.data(key=k)
2567 fm_extras.data(key=k)
2563 fm_extras.data(value=v)
2568 fm_extras.data(value=v)
2564 fm_extras.end()
2569 fm_extras.end()
2565
2570
2566 fm.end()
2571 fm.end()
2567
2572
2568
2573
2569 @command(b'debugnamecomplete', [], _(b'NAME...'))
2574 @command(b'debugnamecomplete', [], _(b'NAME...'))
2570 def debugnamecomplete(ui, repo, *args):
2575 def debugnamecomplete(ui, repo, *args):
2571 '''complete "names" - tags, open branch names, bookmark names'''
2576 '''complete "names" - tags, open branch names, bookmark names'''
2572
2577
2573 names = set()
2578 names = set()
2574 # since we previously only listed open branches, we will handle that
2579 # since we previously only listed open branches, we will handle that
2575 # specially (after this for loop)
2580 # specially (after this for loop)
2576 for name, ns in repo.names.items():
2581 for name, ns in repo.names.items():
2577 if name != b'branches':
2582 if name != b'branches':
2578 names.update(ns.listnames(repo))
2583 names.update(ns.listnames(repo))
2579 names.update(
2584 names.update(
2580 tag
2585 tag
2581 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2586 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2582 if not closed
2587 if not closed
2583 )
2588 )
2584 completions = set()
2589 completions = set()
2585 if not args:
2590 if not args:
2586 args = [b'']
2591 args = [b'']
2587 for a in args:
2592 for a in args:
2588 completions.update(n for n in names if n.startswith(a))
2593 completions.update(n for n in names if n.startswith(a))
2589 ui.write(b'\n'.join(sorted(completions)))
2594 ui.write(b'\n'.join(sorted(completions)))
2590 ui.write(b'\n')
2595 ui.write(b'\n')
2591
2596
2592
2597
2593 @command(
2598 @command(
2594 b'debugnodemap',
2599 b'debugnodemap',
2595 [
2600 [
2596 (
2601 (
2597 b'',
2602 b'',
2598 b'dump-new',
2603 b'dump-new',
2599 False,
2604 False,
2600 _(b'write a (new) persistent binary nodemap on stdout'),
2605 _(b'write a (new) persistent binary nodemap on stdout'),
2601 ),
2606 ),
2602 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2607 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2603 (
2608 (
2604 b'',
2609 b'',
2605 b'check',
2610 b'check',
2606 False,
2611 False,
2607 _(b'check that the data on disk data are correct.'),
2612 _(b'check that the data on disk data are correct.'),
2608 ),
2613 ),
2609 (
2614 (
2610 b'',
2615 b'',
2611 b'metadata',
2616 b'metadata',
2612 False,
2617 False,
2613 _(b'display the on disk meta data for the nodemap'),
2618 _(b'display the on disk meta data for the nodemap'),
2614 ),
2619 ),
2615 ],
2620 ],
2616 )
2621 )
2617 def debugnodemap(ui, repo, **opts):
2622 def debugnodemap(ui, repo, **opts):
2618 """write and inspect on disk nodemap"""
2623 """write and inspect on disk nodemap"""
2619 if opts['dump_new']:
2624 if opts['dump_new']:
2620 unfi = repo.unfiltered()
2625 unfi = repo.unfiltered()
2621 cl = unfi.changelog
2626 cl = unfi.changelog
2622 if util.safehasattr(cl.index, "nodemap_data_all"):
2627 if util.safehasattr(cl.index, "nodemap_data_all"):
2623 data = cl.index.nodemap_data_all()
2628 data = cl.index.nodemap_data_all()
2624 else:
2629 else:
2625 data = nodemap.persistent_data(cl.index)
2630 data = nodemap.persistent_data(cl.index)
2626 ui.write(data)
2631 ui.write(data)
2627 elif opts['dump_disk']:
2632 elif opts['dump_disk']:
2628 unfi = repo.unfiltered()
2633 unfi = repo.unfiltered()
2629 cl = unfi.changelog
2634 cl = unfi.changelog
2630 nm_data = nodemap.persisted_data(cl)
2635 nm_data = nodemap.persisted_data(cl)
2631 if nm_data is not None:
2636 if nm_data is not None:
2632 docket, data = nm_data
2637 docket, data = nm_data
2633 ui.write(data[:])
2638 ui.write(data[:])
2634 elif opts['check']:
2639 elif opts['check']:
2635 unfi = repo.unfiltered()
2640 unfi = repo.unfiltered()
2636 cl = unfi.changelog
2641 cl = unfi.changelog
2637 nm_data = nodemap.persisted_data(cl)
2642 nm_data = nodemap.persisted_data(cl)
2638 if nm_data is not None:
2643 if nm_data is not None:
2639 docket, data = nm_data
2644 docket, data = nm_data
2640 return nodemap.check_data(ui, cl.index, data)
2645 return nodemap.check_data(ui, cl.index, data)
2641 elif opts['metadata']:
2646 elif opts['metadata']:
2642 unfi = repo.unfiltered()
2647 unfi = repo.unfiltered()
2643 cl = unfi.changelog
2648 cl = unfi.changelog
2644 nm_data = nodemap.persisted_data(cl)
2649 nm_data = nodemap.persisted_data(cl)
2645 if nm_data is not None:
2650 if nm_data is not None:
2646 docket, data = nm_data
2651 docket, data = nm_data
2647 ui.write((b"uid: %s\n") % docket.uid)
2652 ui.write((b"uid: %s\n") % docket.uid)
2648 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2653 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2649 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2654 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2650 ui.write((b"data-length: %d\n") % docket.data_length)
2655 ui.write((b"data-length: %d\n") % docket.data_length)
2651 ui.write((b"data-unused: %d\n") % docket.data_unused)
2656 ui.write((b"data-unused: %d\n") % docket.data_unused)
2652 unused_perc = docket.data_unused * 100.0 / docket.data_length
2657 unused_perc = docket.data_unused * 100.0 / docket.data_length
2653 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2658 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2654
2659
2655
2660
2656 @command(
2661 @command(
2657 b'debugobsolete',
2662 b'debugobsolete',
2658 [
2663 [
2659 (b'', b'flags', 0, _(b'markers flag')),
2664 (b'', b'flags', 0, _(b'markers flag')),
2660 (
2665 (
2661 b'',
2666 b'',
2662 b'record-parents',
2667 b'record-parents',
2663 False,
2668 False,
2664 _(b'record parent information for the precursor'),
2669 _(b'record parent information for the precursor'),
2665 ),
2670 ),
2666 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2671 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2667 (
2672 (
2668 b'',
2673 b'',
2669 b'exclusive',
2674 b'exclusive',
2670 False,
2675 False,
2671 _(b'restrict display to markers only relevant to REV'),
2676 _(b'restrict display to markers only relevant to REV'),
2672 ),
2677 ),
2673 (b'', b'index', False, _(b'display index of the marker')),
2678 (b'', b'index', False, _(b'display index of the marker')),
2674 (b'', b'delete', [], _(b'delete markers specified by indices')),
2679 (b'', b'delete', [], _(b'delete markers specified by indices')),
2675 ]
2680 ]
2676 + cmdutil.commitopts2
2681 + cmdutil.commitopts2
2677 + cmdutil.formatteropts,
2682 + cmdutil.formatteropts,
2678 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2683 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2679 )
2684 )
2680 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2685 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2681 """create arbitrary obsolete marker
2686 """create arbitrary obsolete marker
2682
2687
2683 With no arguments, displays the list of obsolescence markers."""
2688 With no arguments, displays the list of obsolescence markers."""
2684
2689
2685 opts = pycompat.byteskwargs(opts)
2690 opts = pycompat.byteskwargs(opts)
2686
2691
2687 def parsenodeid(s):
2692 def parsenodeid(s):
2688 try:
2693 try:
2689 # We do not use revsingle/revrange functions here to accept
2694 # We do not use revsingle/revrange functions here to accept
2690 # arbitrary node identifiers, possibly not present in the
2695 # arbitrary node identifiers, possibly not present in the
2691 # local repository.
2696 # local repository.
2692 n = bin(s)
2697 n = bin(s)
2693 if len(n) != repo.nodeconstants.nodelen:
2698 if len(n) != repo.nodeconstants.nodelen:
2694 raise ValueError
2699 raise ValueError
2695 return n
2700 return n
2696 except ValueError:
2701 except ValueError:
2697 raise error.InputError(
2702 raise error.InputError(
2698 b'changeset references must be full hexadecimal '
2703 b'changeset references must be full hexadecimal '
2699 b'node identifiers'
2704 b'node identifiers'
2700 )
2705 )
2701
2706
2702 if opts.get(b'delete'):
2707 if opts.get(b'delete'):
2703 indices = []
2708 indices = []
2704 for v in opts.get(b'delete'):
2709 for v in opts.get(b'delete'):
2705 try:
2710 try:
2706 indices.append(int(v))
2711 indices.append(int(v))
2707 except ValueError:
2712 except ValueError:
2708 raise error.InputError(
2713 raise error.InputError(
2709 _(b'invalid index value: %r') % v,
2714 _(b'invalid index value: %r') % v,
2710 hint=_(b'use integers for indices'),
2715 hint=_(b'use integers for indices'),
2711 )
2716 )
2712
2717
2713 if repo.currenttransaction():
2718 if repo.currenttransaction():
2714 raise error.Abort(
2719 raise error.Abort(
2715 _(b'cannot delete obsmarkers in the middle of transaction.')
2720 _(b'cannot delete obsmarkers in the middle of transaction.')
2716 )
2721 )
2717
2722
2718 with repo.lock():
2723 with repo.lock():
2719 n = repair.deleteobsmarkers(repo.obsstore, indices)
2724 n = repair.deleteobsmarkers(repo.obsstore, indices)
2720 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2725 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2721
2726
2722 return
2727 return
2723
2728
2724 if precursor is not None:
2729 if precursor is not None:
2725 if opts[b'rev']:
2730 if opts[b'rev']:
2726 raise error.InputError(
2731 raise error.InputError(
2727 b'cannot select revision when creating marker'
2732 b'cannot select revision when creating marker'
2728 )
2733 )
2729 metadata = {}
2734 metadata = {}
2730 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2735 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2731 succs = tuple(parsenodeid(succ) for succ in successors)
2736 succs = tuple(parsenodeid(succ) for succ in successors)
2732 l = repo.lock()
2737 l = repo.lock()
2733 try:
2738 try:
2734 tr = repo.transaction(b'debugobsolete')
2739 tr = repo.transaction(b'debugobsolete')
2735 try:
2740 try:
2736 date = opts.get(b'date')
2741 date = opts.get(b'date')
2737 if date:
2742 if date:
2738 date = dateutil.parsedate(date)
2743 date = dateutil.parsedate(date)
2739 else:
2744 else:
2740 date = None
2745 date = None
2741 prec = parsenodeid(precursor)
2746 prec = parsenodeid(precursor)
2742 parents = None
2747 parents = None
2743 if opts[b'record_parents']:
2748 if opts[b'record_parents']:
2744 if prec not in repo.unfiltered():
2749 if prec not in repo.unfiltered():
2745 raise error.Abort(
2750 raise error.Abort(
2746 b'cannot used --record-parents on '
2751 b'cannot used --record-parents on '
2747 b'unknown changesets'
2752 b'unknown changesets'
2748 )
2753 )
2749 parents = repo.unfiltered()[prec].parents()
2754 parents = repo.unfiltered()[prec].parents()
2750 parents = tuple(p.node() for p in parents)
2755 parents = tuple(p.node() for p in parents)
2751 repo.obsstore.create(
2756 repo.obsstore.create(
2752 tr,
2757 tr,
2753 prec,
2758 prec,
2754 succs,
2759 succs,
2755 opts[b'flags'],
2760 opts[b'flags'],
2756 parents=parents,
2761 parents=parents,
2757 date=date,
2762 date=date,
2758 metadata=metadata,
2763 metadata=metadata,
2759 ui=ui,
2764 ui=ui,
2760 )
2765 )
2761 tr.close()
2766 tr.close()
2762 except ValueError as exc:
2767 except ValueError as exc:
2763 raise error.Abort(
2768 raise error.Abort(
2764 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2769 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2765 )
2770 )
2766 finally:
2771 finally:
2767 tr.release()
2772 tr.release()
2768 finally:
2773 finally:
2769 l.release()
2774 l.release()
2770 else:
2775 else:
2771 if opts[b'rev']:
2776 if opts[b'rev']:
2772 revs = logcmdutil.revrange(repo, opts[b'rev'])
2777 revs = logcmdutil.revrange(repo, opts[b'rev'])
2773 nodes = [repo[r].node() for r in revs]
2778 nodes = [repo[r].node() for r in revs]
2774 markers = list(
2779 markers = list(
2775 obsutil.getmarkers(
2780 obsutil.getmarkers(
2776 repo, nodes=nodes, exclusive=opts[b'exclusive']
2781 repo, nodes=nodes, exclusive=opts[b'exclusive']
2777 )
2782 )
2778 )
2783 )
2779 markers.sort(key=lambda x: x._data)
2784 markers.sort(key=lambda x: x._data)
2780 else:
2785 else:
2781 markers = obsutil.getmarkers(repo)
2786 markers = obsutil.getmarkers(repo)
2782
2787
2783 markerstoiter = markers
2788 markerstoiter = markers
2784 isrelevant = lambda m: True
2789 isrelevant = lambda m: True
2785 if opts.get(b'rev') and opts.get(b'index'):
2790 if opts.get(b'rev') and opts.get(b'index'):
2786 markerstoiter = obsutil.getmarkers(repo)
2791 markerstoiter = obsutil.getmarkers(repo)
2787 markerset = set(markers)
2792 markerset = set(markers)
2788 isrelevant = lambda m: m in markerset
2793 isrelevant = lambda m: m in markerset
2789
2794
2790 fm = ui.formatter(b'debugobsolete', opts)
2795 fm = ui.formatter(b'debugobsolete', opts)
2791 for i, m in enumerate(markerstoiter):
2796 for i, m in enumerate(markerstoiter):
2792 if not isrelevant(m):
2797 if not isrelevant(m):
2793 # marker can be irrelevant when we're iterating over a set
2798 # marker can be irrelevant when we're iterating over a set
2794 # of markers (markerstoiter) which is bigger than the set
2799 # of markers (markerstoiter) which is bigger than the set
2795 # of markers we want to display (markers)
2800 # of markers we want to display (markers)
2796 # this can happen if both --index and --rev options are
2801 # this can happen if both --index and --rev options are
2797 # provided and thus we need to iterate over all of the markers
2802 # provided and thus we need to iterate over all of the markers
2798 # to get the correct indices, but only display the ones that
2803 # to get the correct indices, but only display the ones that
2799 # are relevant to --rev value
2804 # are relevant to --rev value
2800 continue
2805 continue
2801 fm.startitem()
2806 fm.startitem()
2802 ind = i if opts.get(b'index') else None
2807 ind = i if opts.get(b'index') else None
2803 cmdutil.showmarker(fm, m, index=ind)
2808 cmdutil.showmarker(fm, m, index=ind)
2804 fm.end()
2809 fm.end()
2805
2810
2806
2811
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One line per copy record, in the context's native iteration order.
    copy_map = ctx.p1copies()
    for dest, source in copy_map.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2819
2824
2820
2825
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One line per copy record, in the context's native iteration order.
    copy_map = ctx.p2copies()
    for dest, source in copy_map.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2833
2838
2834
2839
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Returns (files, dirs): completions for `path` whose dirstate
        # state letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # The requested path lies outside this repository.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; translate when the OS separator
        # differs (Windows).
        needsep = pycompat.ossep != b'/'
        if needsep:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for fname, entry in dirstate.items():
            if fname.startswith(spec) and entry.state in acceptable:
                if needsep:
                    fname = fname.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(fname)
                    continue
                # Without --full, stop at the next path segment.
                sep = fname.find(pycompat.ossep, speclen)
                if sep >= 0:
                    adddir(fname[:sep])
                else:
                    addfile(fname)
        return files, dirs

    # Map the -n/-a/-r flags onto dirstate state letters.
    acceptable = b''
    for optname, states in (
        ('normal', b'nm'),
        ('added', b'a'),
        ('removed', b'r'),
    ):
        if opts[optname]:
            acceptable += states
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # No flag given means "accept everything".
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2903
2908
2904
2909
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(ctx1, pats, opts)
    # Sorted by destination path for deterministic output.
    copy_map = copies.pathcopies(ctx1, ctx2, matcher)
    for dest, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2918
2923
2919
2924
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # Always release the peer connection, even if a query failed.
        peer.close()
2943
2948
2944
2949
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the inputs that can short-circuit merge-pattern matching.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Without --debug, suppress the chatter _picktool produces
            # while matching merge-patterns.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3029
3034
3030
3035
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Five-argument form: conditionally update a single key.
            key, old, new = keyinfo
            with target.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            # Exit status 0 on success (truthy result).
            return not result
        else:
            # Two-argument form: dump every key/value in the namespace.
            for key, value in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
    finally:
        target.close()
3066
3071
3067
3072
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors ("pvecs") of two revisions

    Prints both vectors, their depths, and their delta/hamming
    distance together with the detected relation:
    ``=`` equal, ``>``/``<`` ancestor relation, ``|`` conflicting,
    ``?`` none of the above.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Default to b"?" so an unexpected vector pair cannot leave `rel`
    # unbound (previously the chain had no else branch, and the final
    # ui.write would raise NameError if no comparison matched).
    rel = b"?"
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3094
3099
3095
3100
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            # Tracked in the manifest but unknown to the dirstate ...
            manifest_only = in_manifest - in_dirstate
            # ... plus dirstate entries that are not scheduled additions
            # and do not exist in the manifest.
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f
                for f in dirstate_only
                if not dirstate.get_entry(f).added
            }
            changedfiles = manifest_only | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3143
3148
3144
3149
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate the actual work to the repair module; --only-data limits
    # the scan to .d files.
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3161
3166
3162
3167
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for fname in ctx.walk(matcher):
        fctx = ctx[fname]
        # renamed() returns (source path, source filenode) or falsy.
        renamed_from = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(fname)
        if renamed_from:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (rel, renamed_from[0], hex(renamed_from[1]))
            )
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3182
3187
3183
3188
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    for req in sorted(repo.requirements):
        ui.write(b"%s\n" % req)
3189
3194
3190
3195
3191 @command(
3196 @command(
3192 b'debugrevlog',
3197 b'debugrevlog',
3193 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3198 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3194 _(b'-c|-m|FILE'),
3199 _(b'-c|-m|FILE'),
3195 optionalrepo=True,
3200 optionalrepo=True,
3196 )
3201 )
3197 def debugrevlog(ui, repo, file_=None, **opts):
3202 def debugrevlog(ui, repo, file_=None, **opts):
3198 """show data and statistics about a revlog"""
3203 """show data and statistics about a revlog"""
3199 opts = pycompat.byteskwargs(opts)
3204 opts = pycompat.byteskwargs(opts)
3200 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3205 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3201
3206
3202 if opts.get(b"dump"):
3207 if opts.get(b"dump"):
3203 numrevs = len(r)
3208 numrevs = len(r)
3204 ui.write(
3209 ui.write(
3205 (
3210 (
3206 b"# rev p1rev p2rev start end deltastart base p1 p2"
3211 b"# rev p1rev p2rev start end deltastart base p1 p2"
3207 b" rawsize totalsize compression heads chainlen\n"
3212 b" rawsize totalsize compression heads chainlen\n"
3208 )
3213 )
3209 )
3214 )
3210 ts = 0
3215 ts = 0
3211 heads = set()
3216 heads = set()
3212
3217
3213 for rev in range(numrevs):
3218 for rev in range(numrevs):
3214 dbase = r.deltaparent(rev)
3219 dbase = r.deltaparent(rev)
3215 if dbase == -1:
3220 if dbase == -1:
3216 dbase = rev
3221 dbase = rev
3217 cbase = r.chainbase(rev)
3222 cbase = r.chainbase(rev)
3218 clen = r.chainlen(rev)
3223 clen = r.chainlen(rev)
3219 p1, p2 = r.parentrevs(rev)
3224 p1, p2 = r.parentrevs(rev)
3220 rs = r.rawsize(rev)
3225 rs = r.rawsize(rev)
3221 ts = ts + rs
3226 ts = ts + rs
3222 heads -= set(r.parentrevs(rev))
3227 heads -= set(r.parentrevs(rev))
3223 heads.add(rev)
3228 heads.add(rev)
3224 try:
3229 try:
3225 compression = ts / r.end(rev)
3230 compression = ts / r.end(rev)
3226 except ZeroDivisionError:
3231 except ZeroDivisionError:
3227 compression = 0
3232 compression = 0
3228 ui.write(
3233 ui.write(
3229 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3234 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3230 b"%11d %5d %8d\n"
3235 b"%11d %5d %8d\n"
3231 % (
3236 % (
3232 rev,
3237 rev,
3233 p1,
3238 p1,
3234 p2,
3239 p2,
3235 r.start(rev),
3240 r.start(rev),
3236 r.end(rev),
3241 r.end(rev),
3237 r.start(dbase),
3242 r.start(dbase),
3238 r.start(cbase),
3243 r.start(cbase),
3239 r.start(p1),
3244 r.start(p1),
3240 r.start(p2),
3245 r.start(p2),
3241 rs,
3246 rs,
3242 ts,
3247 ts,
3243 compression,
3248 compression,
3244 len(heads),
3249 len(heads),
3245 clen,
3250 clen,
3246 )
3251 )
3247 )
3252 )
3248 return 0
3253 return 0
3249
3254
3250 format = r._format_version
3255 format = r._format_version
3251 v = r._format_flags
3256 v = r._format_flags
3252 flags = []
3257 flags = []
3253 gdelta = False
3258 gdelta = False
3254 if v & revlog.FLAG_INLINE_DATA:
3259 if v & revlog.FLAG_INLINE_DATA:
3255 flags.append(b'inline')
3260 flags.append(b'inline')
3256 if v & revlog.FLAG_GENERALDELTA:
3261 if v & revlog.FLAG_GENERALDELTA:
3257 gdelta = True
3262 gdelta = True
3258 flags.append(b'generaldelta')
3263 flags.append(b'generaldelta')
3259 if not flags:
3264 if not flags:
3260 flags = [b'(none)']
3265 flags = [b'(none)']
3261
3266
3262 ### tracks merge vs single parent
3267 ### tracks merge vs single parent
3263 nummerges = 0
3268 nummerges = 0
3264
3269
3265 ### tracks ways the "delta" are build
3270 ### tracks ways the "delta" are build
3266 # nodelta
3271 # nodelta
3267 numempty = 0
3272 numempty = 0
3268 numemptytext = 0
3273 numemptytext = 0
3269 numemptydelta = 0
3274 numemptydelta = 0
3270 # full file content
3275 # full file content
3271 numfull = 0
3276 numfull = 0
3272 # intermediate snapshot against a prior snapshot
3277 # intermediate snapshot against a prior snapshot
3273 numsemi = 0
3278 numsemi = 0
3274 # snapshot count per depth
3279 # snapshot count per depth
3275 numsnapdepth = collections.defaultdict(lambda: 0)
3280 numsnapdepth = collections.defaultdict(lambda: 0)
3276 # delta against previous revision
3281 # delta against previous revision
3277 numprev = 0
3282 numprev = 0
3278 # delta against first or second parent (not prev)
3283 # delta against first or second parent (not prev)
3279 nump1 = 0
3284 nump1 = 0
3280 nump2 = 0
3285 nump2 = 0
3281 # delta against neither prev nor parents
3286 # delta against neither prev nor parents
3282 numother = 0
3287 numother = 0
3283 # delta against prev that are also first or second parent
3288 # delta against prev that are also first or second parent
3284 # (details of `numprev`)
3289 # (details of `numprev`)
3285 nump1prev = 0
3290 nump1prev = 0
3286 nump2prev = 0
3291 nump2prev = 0
3287
3292
3288 # data about delta chain of each revs
3293 # data about delta chain of each revs
3289 chainlengths = []
3294 chainlengths = []
3290 chainbases = []
3295 chainbases = []
3291 chainspans = []
3296 chainspans = []
3292
3297
3293 # data about each revision
3298 # data about each revision
3294 datasize = [None, 0, 0]
3299 datasize = [None, 0, 0]
3295 fullsize = [None, 0, 0]
3300 fullsize = [None, 0, 0]
3296 semisize = [None, 0, 0]
3301 semisize = [None, 0, 0]
3297 # snapshot count per depth
3302 # snapshot count per depth
3298 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3303 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3299 deltasize = [None, 0, 0]
3304 deltasize = [None, 0, 0]
3300 chunktypecounts = {}
3305 chunktypecounts = {}
3301 chunktypesizes = {}
3306 chunktypesizes = {}
3302
3307
3303 def addsize(size, l):
3308 def addsize(size, l):
3304 if l[0] is None or size < l[0]:
3309 if l[0] is None or size < l[0]:
3305 l[0] = size
3310 l[0] = size
3306 if size > l[1]:
3311 if size > l[1]:
3307 l[1] = size
3312 l[1] = size
3308 l[2] += size
3313 l[2] += size
3309
3314
3310 numrevs = len(r)
3315 numrevs = len(r)
3311 for rev in range(numrevs):
3316 for rev in range(numrevs):
3312 p1, p2 = r.parentrevs(rev)
3317 p1, p2 = r.parentrevs(rev)
3313 delta = r.deltaparent(rev)
3318 delta = r.deltaparent(rev)
3314 if format > 0:
3319 if format > 0:
3315 addsize(r.rawsize(rev), datasize)
3320 addsize(r.rawsize(rev), datasize)
3316 if p2 != nullrev:
3321 if p2 != nullrev:
3317 nummerges += 1
3322 nummerges += 1
3318 size = r.length(rev)
3323 size = r.length(rev)
3319 if delta == nullrev:
3324 if delta == nullrev:
3320 chainlengths.append(0)
3325 chainlengths.append(0)
3321 chainbases.append(r.start(rev))
3326 chainbases.append(r.start(rev))
3322 chainspans.append(size)
3327 chainspans.append(size)
3323 if size == 0:
3328 if size == 0:
3324 numempty += 1
3329 numempty += 1
3325 numemptytext += 1
3330 numemptytext += 1
3326 else:
3331 else:
3327 numfull += 1
3332 numfull += 1
3328 numsnapdepth[0] += 1
3333 numsnapdepth[0] += 1
3329 addsize(size, fullsize)
3334 addsize(size, fullsize)
3330 addsize(size, snapsizedepth[0])
3335 addsize(size, snapsizedepth[0])
3331 else:
3336 else:
3332 chainlengths.append(chainlengths[delta] + 1)
3337 chainlengths.append(chainlengths[delta] + 1)
3333 baseaddr = chainbases[delta]
3338 baseaddr = chainbases[delta]
3334 revaddr = r.start(rev)
3339 revaddr = r.start(rev)
3335 chainbases.append(baseaddr)
3340 chainbases.append(baseaddr)
3336 chainspans.append((revaddr - baseaddr) + size)
3341 chainspans.append((revaddr - baseaddr) + size)
3337 if size == 0:
3342 if size == 0:
3338 numempty += 1
3343 numempty += 1
3339 numemptydelta += 1
3344 numemptydelta += 1
3340 elif r.issnapshot(rev):
3345 elif r.issnapshot(rev):
3341 addsize(size, semisize)
3346 addsize(size, semisize)
3342 numsemi += 1
3347 numsemi += 1
3343 depth = r.snapshotdepth(rev)
3348 depth = r.snapshotdepth(rev)
3344 numsnapdepth[depth] += 1
3349 numsnapdepth[depth] += 1
3345 addsize(size, snapsizedepth[depth])
3350 addsize(size, snapsizedepth[depth])
3346 else:
3351 else:
3347 addsize(size, deltasize)
3352 addsize(size, deltasize)
3348 if delta == rev - 1:
3353 if delta == rev - 1:
3349 numprev += 1
3354 numprev += 1
3350 if delta == p1:
3355 if delta == p1:
3351 nump1prev += 1
3356 nump1prev += 1
3352 elif delta == p2:
3357 elif delta == p2:
3353 nump2prev += 1
3358 nump2prev += 1
3354 elif delta == p1:
3359 elif delta == p1:
3355 nump1 += 1
3360 nump1 += 1
3356 elif delta == p2:
3361 elif delta == p2:
3357 nump2 += 1
3362 nump2 += 1
3358 elif delta != nullrev:
3363 elif delta != nullrev:
3359 numother += 1
3364 numother += 1
3360
3365
3361 # Obtain data on the raw chunks in the revlog.
3366 # Obtain data on the raw chunks in the revlog.
3362 if util.safehasattr(r, b'_getsegmentforrevs'):
3367 if util.safehasattr(r, b'_getsegmentforrevs'):
3363 segment = r._getsegmentforrevs(rev, rev)[1]
3368 segment = r._getsegmentforrevs(rev, rev)[1]
3364 else:
3369 else:
3365 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3370 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3366 if segment:
3371 if segment:
3367 chunktype = bytes(segment[0:1])
3372 chunktype = bytes(segment[0:1])
3368 else:
3373 else:
3369 chunktype = b'empty'
3374 chunktype = b'empty'
3370
3375
3371 if chunktype not in chunktypecounts:
3376 if chunktype not in chunktypecounts:
3372 chunktypecounts[chunktype] = 0
3377 chunktypecounts[chunktype] = 0
3373 chunktypesizes[chunktype] = 0
3378 chunktypesizes[chunktype] = 0
3374
3379
3375 chunktypecounts[chunktype] += 1
3380 chunktypecounts[chunktype] += 1
3376 chunktypesizes[chunktype] += size
3381 chunktypesizes[chunktype] += size
3377
3382
3378 # Adjust size min value for empty cases
3383 # Adjust size min value for empty cases
3379 for size in (datasize, fullsize, semisize, deltasize):
3384 for size in (datasize, fullsize, semisize, deltasize):
3380 if size[0] is None:
3385 if size[0] is None:
3381 size[0] = 0
3386 size[0] = 0
3382
3387
3383 numdeltas = numrevs - numfull - numempty - numsemi
3388 numdeltas = numrevs - numfull - numempty - numsemi
3384 numoprev = numprev - nump1prev - nump2prev
3389 numoprev = numprev - nump1prev - nump2prev
3385 totalrawsize = datasize[2]
3390 totalrawsize = datasize[2]
3386 datasize[2] /= numrevs
3391 datasize[2] /= numrevs
3387 fulltotal = fullsize[2]
3392 fulltotal = fullsize[2]
3388 if numfull == 0:
3393 if numfull == 0:
3389 fullsize[2] = 0
3394 fullsize[2] = 0
3390 else:
3395 else:
3391 fullsize[2] /= numfull
3396 fullsize[2] /= numfull
3392 semitotal = semisize[2]
3397 semitotal = semisize[2]
3393 snaptotal = {}
3398 snaptotal = {}
3394 if numsemi > 0:
3399 if numsemi > 0:
3395 semisize[2] /= numsemi
3400 semisize[2] /= numsemi
3396 for depth in snapsizedepth:
3401 for depth in snapsizedepth:
3397 snaptotal[depth] = snapsizedepth[depth][2]
3402 snaptotal[depth] = snapsizedepth[depth][2]
3398 snapsizedepth[depth][2] /= numsnapdepth[depth]
3403 snapsizedepth[depth][2] /= numsnapdepth[depth]
3399
3404
3400 deltatotal = deltasize[2]
3405 deltatotal = deltasize[2]
3401 if numdeltas > 0:
3406 if numdeltas > 0:
3402 deltasize[2] /= numdeltas
3407 deltasize[2] /= numdeltas
3403 totalsize = fulltotal + semitotal + deltatotal
3408 totalsize = fulltotal + semitotal + deltatotal
3404 avgchainlen = sum(chainlengths) / numrevs
3409 avgchainlen = sum(chainlengths) / numrevs
3405 maxchainlen = max(chainlengths)
3410 maxchainlen = max(chainlengths)
3406 maxchainspan = max(chainspans)
3411 maxchainspan = max(chainspans)
3407 compratio = 1
3412 compratio = 1
3408 if totalsize:
3413 if totalsize:
3409 compratio = totalrawsize / totalsize
3414 compratio = totalrawsize / totalsize
3410
3415
3411 basedfmtstr = b'%%%dd\n'
3416 basedfmtstr = b'%%%dd\n'
3412 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3417 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3413
3418
3414 def dfmtstr(max):
3419 def dfmtstr(max):
3415 return basedfmtstr % len(str(max))
3420 return basedfmtstr % len(str(max))
3416
3421
3417 def pcfmtstr(max, padding=0):
3422 def pcfmtstr(max, padding=0):
3418 return basepcfmtstr % (len(str(max)), b' ' * padding)
3423 return basepcfmtstr % (len(str(max)), b' ' * padding)
3419
3424
3420 def pcfmt(value, total):
3425 def pcfmt(value, total):
3421 if total:
3426 if total:
3422 return (value, 100 * float(value) / total)
3427 return (value, 100 * float(value) / total)
3423 else:
3428 else:
3424 return value, 100.0
3429 return value, 100.0
3425
3430
3426 ui.writenoi18n(b'format : %d\n' % format)
3431 ui.writenoi18n(b'format : %d\n' % format)
3427 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3432 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3428
3433
3429 ui.write(b'\n')
3434 ui.write(b'\n')
3430 fmt = pcfmtstr(totalsize)
3435 fmt = pcfmtstr(totalsize)
3431 fmt2 = dfmtstr(totalsize)
3436 fmt2 = dfmtstr(totalsize)
3432 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3437 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3433 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3438 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3434 ui.writenoi18n(
3439 ui.writenoi18n(
3435 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3440 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3436 )
3441 )
3437 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3442 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3438 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3443 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3439 ui.writenoi18n(
3444 ui.writenoi18n(
3440 b' text : '
3445 b' text : '
3441 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3446 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3442 )
3447 )
3443 ui.writenoi18n(
3448 ui.writenoi18n(
3444 b' delta : '
3449 b' delta : '
3445 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3450 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3446 )
3451 )
3447 ui.writenoi18n(
3452 ui.writenoi18n(
3448 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3453 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3449 )
3454 )
3450 for depth in sorted(numsnapdepth):
3455 for depth in sorted(numsnapdepth):
3451 ui.write(
3456 ui.write(
3452 (b' lvl-%-3d : ' % depth)
3457 (b' lvl-%-3d : ' % depth)
3453 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3458 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3454 )
3459 )
3455 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3460 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3456 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3461 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3457 ui.writenoi18n(
3462 ui.writenoi18n(
3458 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3463 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3459 )
3464 )
3460 for depth in sorted(numsnapdepth):
3465 for depth in sorted(numsnapdepth):
3461 ui.write(
3466 ui.write(
3462 (b' lvl-%-3d : ' % depth)
3467 (b' lvl-%-3d : ' % depth)
3463 + fmt % pcfmt(snaptotal[depth], totalsize)
3468 + fmt % pcfmt(snaptotal[depth], totalsize)
3464 )
3469 )
3465 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3470 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3466
3471
3467 def fmtchunktype(chunktype):
3472 def fmtchunktype(chunktype):
3468 if chunktype == b'empty':
3473 if chunktype == b'empty':
3469 return b' %s : ' % chunktype
3474 return b' %s : ' % chunktype
3470 elif chunktype in pycompat.bytestr(string.ascii_letters):
3475 elif chunktype in pycompat.bytestr(string.ascii_letters):
3471 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3476 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3472 else:
3477 else:
3473 return b' 0x%s : ' % hex(chunktype)
3478 return b' 0x%s : ' % hex(chunktype)
3474
3479
3475 ui.write(b'\n')
3480 ui.write(b'\n')
3476 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3481 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3477 for chunktype in sorted(chunktypecounts):
3482 for chunktype in sorted(chunktypecounts):
3478 ui.write(fmtchunktype(chunktype))
3483 ui.write(fmtchunktype(chunktype))
3479 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3484 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3480 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3485 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3481 for chunktype in sorted(chunktypecounts):
3486 for chunktype in sorted(chunktypecounts):
3482 ui.write(fmtchunktype(chunktype))
3487 ui.write(fmtchunktype(chunktype))
3483 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3488 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3484
3489
3485 ui.write(b'\n')
3490 ui.write(b'\n')
3486 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3491 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3487 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3492 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3488 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3493 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3489 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3494 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3490 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3495 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3491
3496
3492 if format > 0:
3497 if format > 0:
3493 ui.write(b'\n')
3498 ui.write(b'\n')
3494 ui.writenoi18n(
3499 ui.writenoi18n(
3495 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3500 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3496 % tuple(datasize)
3501 % tuple(datasize)
3497 )
3502 )
3498 ui.writenoi18n(
3503 ui.writenoi18n(
3499 b'full revision size (min/max/avg) : %d / %d / %d\n'
3504 b'full revision size (min/max/avg) : %d / %d / %d\n'
3500 % tuple(fullsize)
3505 % tuple(fullsize)
3501 )
3506 )
3502 ui.writenoi18n(
3507 ui.writenoi18n(
3503 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3508 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3504 % tuple(semisize)
3509 % tuple(semisize)
3505 )
3510 )
3506 for depth in sorted(snapsizedepth):
3511 for depth in sorted(snapsizedepth):
3507 if depth == 0:
3512 if depth == 0:
3508 continue
3513 continue
3509 ui.writenoi18n(
3514 ui.writenoi18n(
3510 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3515 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3511 % ((depth,) + tuple(snapsizedepth[depth]))
3516 % ((depth,) + tuple(snapsizedepth[depth]))
3512 )
3517 )
3513 ui.writenoi18n(
3518 ui.writenoi18n(
3514 b'delta size (min/max/avg) : %d / %d / %d\n'
3519 b'delta size (min/max/avg) : %d / %d / %d\n'
3515 % tuple(deltasize)
3520 % tuple(deltasize)
3516 )
3521 )
3517
3522
3518 if numdeltas > 0:
3523 if numdeltas > 0:
3519 ui.write(b'\n')
3524 ui.write(b'\n')
3520 fmt = pcfmtstr(numdeltas)
3525 fmt = pcfmtstr(numdeltas)
3521 fmt2 = pcfmtstr(numdeltas, 4)
3526 fmt2 = pcfmtstr(numdeltas, 4)
3522 ui.writenoi18n(
3527 ui.writenoi18n(
3523 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3528 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3524 )
3529 )
3525 if numprev > 0:
3530 if numprev > 0:
3526 ui.writenoi18n(
3531 ui.writenoi18n(
3527 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3532 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3528 )
3533 )
3529 ui.writenoi18n(
3534 ui.writenoi18n(
3530 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3535 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3531 )
3536 )
3532 ui.writenoi18n(
3537 ui.writenoi18n(
3533 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3538 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3534 )
3539 )
3535 if gdelta:
3540 if gdelta:
3536 ui.writenoi18n(
3541 ui.writenoi18n(
3537 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3542 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3538 )
3543 )
3539 ui.writenoi18n(
3544 ui.writenoi18n(
3540 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3545 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3541 )
3546 )
3542 ui.writenoi18n(
3547 ui.writenoi18n(
3543 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3548 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3544 )
3549 )
3545
3550
3546
3551
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Two output layouts are supported, selected with -f/--format:

    - format 0 (default): offset/length/linkrev plus node ids of the
      revision and its parents (offset/length only with --verbose)
    - format 1: flags/offset/length/size/linkrev plus parent *revision
      numbers* (a subset of columns without --verbose)

    Any other format value aborts.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full hashes with --debug, short (12-char) hashes otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # only the first node is needed: all ids render at the same width
        idlen = len(shortfn(r.node(i)))
        break

    # emit the column header matching the chosen format/verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # one row per revision in index order
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # keep dumping even if parent lookup fails (e.g. a
                # damaged index); substitute null ids for the parents
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not node ids
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3660
3665
3661
3666
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the revset compilation pipeline: each stage transforms the tree
    # produced by the previous one, in this fixed order
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final (optimize) stage entirely
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # stages to always print vs. stages to print only when their tree
    # differs from the last one printed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, keeping every intermediate tree for later use
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and the optimized trees and compare
        # the resulting revision lists element by element
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # mismatch: render a unified-diff-style listing of the two sets
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # normal path: evaluate the final tree and report the results
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3793
3798
3794
3799
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # only the ssh-over-stdio transport is implemented here
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # the two logging destinations are mutually exclusive
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    log_handle = None
    fd_opt = opts[b'logiofd']
    file_opt = opts[b'logiofile']

    if fd_opt:
        # Line buffering would be ideal here, but binary-mode line
        # buffering is unsupported (and warns on Python 3.8+). Unbuffered
        # output may cost some performance, which is acceptable because
        # this code path is not performance critical.
        fd = int(fd_opt)
        try:
            log_handle = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # a pipe cannot seek, so `ab` mode fails on py3; fall back
            # to plain write mode
            log_handle = os.fdopen(fd, 'wb', 0)
    elif file_opt:
        log_handle = open(file_opt, b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=log_handle)
    server.serve_forever()
3843
3848
3844
3849
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions up front; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the wlock is needed: we touch the dirstate, not the store.
    with repo.wlock():
        repo.setparents(node1, node2)
3872
3877
3873
3878
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision
    # itself, so a second positional argument is an error.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fix: report this command's own name, not the copy-pasted
            # 'debugdata' one.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap storage objects that merely wrap an underlying revlog.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for deterministic, readable output.
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3900
3905
3901
3906
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # build an explicit context instead. Verification is intentionally
    # disabled: we only want to fetch the peer's certificate, not trust it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # binary_form=True: we need the DER certificate for the Windows API.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call with build=True attempts the repair via Windows
            # Update.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3973
3978
3974
3979
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the *.hg bundle files kept under .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize log options we do not want forwarded to the peer machinery.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from `chlist`, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # A bundle whose parent revision was stripped cannot be opened;
            # warn and keep scanning the remaining backups.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the noisy bundle-reading machinery while probing.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the wanted node,
                # inside a lock and transaction, then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the bundle's mtime header, then either
                # the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always release resources acquired by getremotechanges().
            cleanupfn()
4115
4120
4116
4121
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print every subrepository recorded in the given (or working) revision,
    # one block per path, ordered by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path in sorted(ctx.substate):
        state = ctx.substate[path]
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4128
4133
4129
4134
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose the ui and repo objects in the interpreter's namespace.
    local_ns = dict(ui=ui, repo=repo)
    code.interact(local=local_ns)
4145
4150
4146
4151
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only, unless the closest
    successors sets option is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

    <rev1>
    <successors-1A>
    <rev2>
    <successors-2A>
    <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Formatting helpers: a context renders as its full byte string, a raw
    # node as its short hex form.
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # One line per successors set; an empty set prints a blank line.
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4201
4206
4202
4207
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        # Classify the cached .hgtags filenode: a real node (possibly
        # unknown to the filelog), a missing entry, or an invalid one.
        if fnode:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4221
4226
4222
4227
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command itself does not.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Build the keyword properties from the -D KEY=VALUE definitions;
    # 'ui' is reserved and an empty key is rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Dump the parsed tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4286
4291
4287
4292
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    answer = ui.getpass(prompt)
    # getpass() may yield None; substitute a visible placeholder.
    if answer is None:
        answer = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % answer)
4302
4307
4303
4308
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user answered to the prompt.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4316
4321
4317
4322
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the wlock and the store lock while rewriting every cache.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4323
4328
4324
4329
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations, then hand everything off to
    # the upgrade machinery which does the actual planning and execution.
    requested = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=requested, backup=backup, **opts
    )
4378 )
4374
4379
4375
4380
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return
    # Honor ui.slash on platforms whose native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Compute each relative path once; it is needed both for column sizing
    # and for the per-file output below.
    relpaths = {fname: repo.pathto(fname) for fname in matched}
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in matched),
        max(len(rel) for rel in relpaths.values()),
    )
    for fname in matched:
        marker = b'exact' if matcher.exact(fname) else b''
        line = fmt % (fname, display(relpaths[fname]), marker)
        ui.write(b"%s\n" % line.rstrip())
4402
4407
4403
4408
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Format each divergent node as "<hex> (<phase>)", joined by
            # spaces, with a trailing space separating it from the reason.
            descs = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(descs) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4421
4426
4422
4427
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the wire-protocol "debugwireargs" command against a peer
    # and echo back what the server saw.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # The connection-level options were consumed by hg.peer(); strip
        # them so only actual command arguments remain.
        for remoteopt in cmdutil.remoteopts:
            opts.pop(remoteopt[1])
        args = {k: v for k, v in opts.items() if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4453
4458
4454
4459
4455 def _parsewirelangblocks(fh):
4460 def _parsewirelangblocks(fh):
4456 activeaction = None
4461 activeaction = None
4457 blocklines = []
4462 blocklines = []
4458 lastindent = 0
4463 lastindent = 0
4459
4464
4460 for line in fh:
4465 for line in fh:
4461 line = line.rstrip()
4466 line = line.rstrip()
4462 if not line:
4467 if not line:
4463 continue
4468 continue
4464
4469
4465 if line.startswith(b'#'):
4470 if line.startswith(b'#'):
4466 continue
4471 continue
4467
4472
4468 if not line.startswith(b' '):
4473 if not line.startswith(b' '):
4469 # New block. Flush previous one.
4474 # New block. Flush previous one.
4470 if activeaction:
4475 if activeaction:
4471 yield activeaction, blocklines
4476 yield activeaction, blocklines
4472
4477
4473 activeaction = line
4478 activeaction = line
4474 blocklines = []
4479 blocklines = []
4475 lastindent = 0
4480 lastindent = 0
4476 continue
4481 continue
4477
4482
4478 # Else we start with an indent.
4483 # Else we start with an indent.
4479
4484
4480 if not activeaction:
4485 if not activeaction:
4481 raise error.Abort(_(b'indented line outside of block'))
4486 raise error.Abort(_(b'indented line outside of block'))
4482
4487
4483 indent = len(line) - len(line.lstrip())
4488 indent = len(line) - len(line.lstrip())
4484
4489
4485 # If this line is indented more than the last line, concatenate it.
4490 # If this line is indented more than the last line, concatenate it.
4486 if indent > lastindent and blocklines:
4491 if indent > lastindent and blocklines:
4487 blocklines[-1] += line.lstrip()
4492 blocklines[-1] += line.lstrip()
4488 else:
4493 else:
4489 blocklines.append(line)
4494 blocklines.append(line)
4490 lastindent = indent
4495 lastindent = indent
4491
4496
4492 # Flush last block.
4497 # Flush last block.
4493 if activeaction:
4498 if activeaction:
4494 yield activeaction, blocklines
4499 yield activeaction, blocklines
4495
4500
4496
4501
4497 @command(
4502 @command(
4498 b'debugwireproto',
4503 b'debugwireproto',
4499 [
4504 [
4500 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4505 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4501 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4506 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4502 (
4507 (
4503 b'',
4508 b'',
4504 b'noreadstderr',
4509 b'noreadstderr',
4505 False,
4510 False,
4506 _(b'do not read from stderr of the remote'),
4511 _(b'do not read from stderr of the remote'),
4507 ),
4512 ),
4508 (
4513 (
4509 b'',
4514 b'',
4510 b'nologhandshake',
4515 b'nologhandshake',
4511 False,
4516 False,
4512 _(b'do not log I/O related to the peer handshake'),
4517 _(b'do not log I/O related to the peer handshake'),
4513 ),
4518 ),
4514 ]
4519 ]
4515 + cmdutil.remoteopts,
4520 + cmdutil.remoteopts,
4516 _(b'[PATH]'),
4521 _(b'[PATH]'),
4517 optionalrepo=True,
4522 optionalrepo=True,
4518 )
4523 )
4519 def debugwireproto(ui, repo, path=None, **opts):
4524 def debugwireproto(ui, repo, path=None, **opts):
4520 """send wire protocol commands to a server
4525 """send wire protocol commands to a server
4521
4526
4522 This command can be used to issue wire protocol commands to remote
4527 This command can be used to issue wire protocol commands to remote
4523 peers and to debug the raw data being exchanged.
4528 peers and to debug the raw data being exchanged.
4524
4529
4525 ``--localssh`` will start an SSH server against the current repository
4530 ``--localssh`` will start an SSH server against the current repository
4526 and connect to that. By default, the connection will perform a handshake
4531 and connect to that. By default, the connection will perform a handshake
4527 and establish an appropriate peer instance.
4532 and establish an appropriate peer instance.
4528
4533
4529 ``--peer`` can be used to bypass the handshake protocol and construct a
4534 ``--peer`` can be used to bypass the handshake protocol and construct a
4530 peer instance using the specified class type. Valid values are ``raw``,
4535 peer instance using the specified class type. Valid values are ``raw``,
4531 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4536 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4532 don't support higher-level command actions.
4537 don't support higher-level command actions.
4533
4538
4534 ``--noreadstderr`` can be used to disable automatic reading from stderr
4539 ``--noreadstderr`` can be used to disable automatic reading from stderr
4535 of the peer (for SSH connections only). Disabling automatic reading of
4540 of the peer (for SSH connections only). Disabling automatic reading of
4536 stderr is useful for making output more deterministic.
4541 stderr is useful for making output more deterministic.
4537
4542
4538 Commands are issued via a mini language which is specified via stdin.
4543 Commands are issued via a mini language which is specified via stdin.
4539 The language consists of individual actions to perform. An action is
4544 The language consists of individual actions to perform. An action is
4540 defined by a block. A block is defined as a line with no leading
4545 defined by a block. A block is defined as a line with no leading
4541 space followed by 0 or more lines with leading space. Blocks are
4546 space followed by 0 or more lines with leading space. Blocks are
4542 effectively a high-level command with additional metadata.
4547 effectively a high-level command with additional metadata.
4543
4548
4544 Lines beginning with ``#`` are ignored.
4549 Lines beginning with ``#`` are ignored.
4545
4550
4546 The following sections denote available actions.
4551 The following sections denote available actions.
4547
4552
4548 raw
4553 raw
4549 ---
4554 ---
4550
4555
4551 Send raw data to the server.
4556 Send raw data to the server.
4552
4557
4553 The block payload contains the raw data to send as one atomic send
4558 The block payload contains the raw data to send as one atomic send
4554 operation. The data may not actually be delivered in a single system
4559 operation. The data may not actually be delivered in a single system
4555 call: it depends on the abilities of the transport being used.
4560 call: it depends on the abilities of the transport being used.
4556
4561
4557 Each line in the block is de-indented and concatenated. Then, that
4562 Each line in the block is de-indented and concatenated. Then, that
4558 value is evaluated as a Python b'' literal. This allows the use of
4563 value is evaluated as a Python b'' literal. This allows the use of
4559 backslash escaping, etc.
4564 backslash escaping, etc.
4560
4565
4561 raw+
4566 raw+
4562 ----
4567 ----
4563
4568
4564 Behaves like ``raw`` except flushes output afterwards.
4569 Behaves like ``raw`` except flushes output afterwards.
4565
4570
4566 command <X>
4571 command <X>
4567 -----------
4572 -----------
4568
4573
4569 Send a request to run a named command, whose name follows the ``command``
4574 Send a request to run a named command, whose name follows the ``command``
4570 string.
4575 string.
4571
4576
4572 Arguments to the command are defined as lines in this block. The format of
4577 Arguments to the command are defined as lines in this block. The format of
4573 each line is ``<key> <value>``. e.g.::
4578 each line is ``<key> <value>``. e.g.::
4574
4579
4575 command listkeys
4580 command listkeys
4576 namespace bookmarks
4581 namespace bookmarks
4577
4582
4578 If the value begins with ``eval:``, it will be interpreted as a Python
4583 If the value begins with ``eval:``, it will be interpreted as a Python
4579 literal expression. Otherwise values are interpreted as Python b'' literals.
4584 literal expression. Otherwise values are interpreted as Python b'' literals.
4580 This allows sending complex types and encoding special byte sequences via
4585 This allows sending complex types and encoding special byte sequences via
4581 backslash escaping.
4586 backslash escaping.
4582
4587
4583 The following arguments have special meaning:
4588 The following arguments have special meaning:
4584
4589
4585 ``PUSHFILE``
4590 ``PUSHFILE``
4586 When defined, the *push* mechanism of the peer will be used instead
4591 When defined, the *push* mechanism of the peer will be used instead
4587 of the static request-response mechanism and the content of the
4592 of the static request-response mechanism and the content of the
4588 file specified in the value of this argument will be sent as the
4593 file specified in the value of this argument will be sent as the
4589 command payload.
4594 command payload.
4590
4595
4591 This can be used to submit a local bundle file to the remote.
4596 This can be used to submit a local bundle file to the remote.
4592
4597
4593 batchbegin
4598 batchbegin
4594 ----------
4599 ----------
4595
4600
4596 Instruct the peer to begin a batched send.
4601 Instruct the peer to begin a batched send.
4597
4602
4598 All ``command`` blocks are queued for execution until the next
4603 All ``command`` blocks are queued for execution until the next
4599 ``batchsubmit`` block.
4604 ``batchsubmit`` block.
4600
4605
4601 batchsubmit
4606 batchsubmit
4602 -----------
4607 -----------
4603
4608
4604 Submit previously queued ``command`` blocks as a batch request.
4609 Submit previously queued ``command`` blocks as a batch request.
4605
4610
4606 This action MUST be paired with a ``batchbegin`` action.
4611 This action MUST be paired with a ``batchbegin`` action.
4607
4612
4608 httprequest <method> <path>
4613 httprequest <method> <path>
4609 ---------------------------
4614 ---------------------------
4610
4615
4611 (HTTP peer only)
4616 (HTTP peer only)
4612
4617
4613 Send an HTTP request to the peer.
4618 Send an HTTP request to the peer.
4614
4619
4615 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4620 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4616
4621
4617 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4622 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4618 headers to add to the request. e.g. ``Accept: foo``.
4623 headers to add to the request. e.g. ``Accept: foo``.
4619
4624
4620 The following arguments are special:
4625 The following arguments are special:
4621
4626
4622 ``BODYFILE``
4627 ``BODYFILE``
4623 The content of the file defined as the value to this argument will be
4628 The content of the file defined as the value to this argument will be
4624 transferred verbatim as the HTTP request body.
4629 transferred verbatim as the HTTP request body.
4625
4630
4626 ``frame <type> <flags> <payload>``
4631 ``frame <type> <flags> <payload>``
4627 Send a unified protocol frame as part of the request body.
4632 Send a unified protocol frame as part of the request body.
4628
4633
4629 All frames will be collected and sent as the body to the HTTP
4634 All frames will be collected and sent as the body to the HTTP
4630 request.
4635 request.
4631
4636
4632 close
4637 close
4633 -----
4638 -----
4634
4639
4635 Close the connection to the server.
4640 Close the connection to the server.
4636
4641
4637 flush
4642 flush
4638 -----
4643 -----
4639
4644
4640 Flush data written to the server.
4645 Flush data written to the server.
4641
4646
4642 readavailable
4647 readavailable
4643 -------------
4648 -------------
4644
4649
4645 Close the write end of the connection and read all available data from
4650 Close the write end of the connection and read all available data from
4646 the server.
4651 the server.
4647
4652
4648 If the connection to the server encompasses multiple pipes, we poll both
4653 If the connection to the server encompasses multiple pipes, we poll both
4649 pipes and read available data.
4654 pipes and read available data.
4650
4655
4651 readline
4656 readline
4652 --------
4657 --------
4653
4658
4654 Read a line of output from the server. If there are multiple output
4659 Read a line of output from the server. If there are multiple output
4655 pipes, reads only the main pipe.
4660 pipes, reads only the main pipe.
4656
4661
4657 ereadline
4662 ereadline
4658 ---------
4663 ---------
4659
4664
4660 Like ``readline``, but read from the stderr pipe, if available.
4665 Like ``readline``, but read from the stderr pipe, if available.
4661
4666
4662 read <X>
4667 read <X>
4663 --------
4668 --------
4664
4669
4665 ``read()`` N bytes from the server's main output pipe.
4670 ``read()`` N bytes from the server's main output pipe.
4666
4671
4667 eread <X>
4672 eread <X>
4668 ---------
4673 ---------
4669
4674
4670 ``read()`` N bytes from the server's stderr pipe, if available.
4675 ``read()`` N bytes from the server's stderr pipe, if available.
4671
4676
4672 Specifying Unified Frame-Based Protocol Frames
4677 Specifying Unified Frame-Based Protocol Frames
4673 ----------------------------------------------
4678 ----------------------------------------------
4674
4679
4675 It is possible to emit a *Unified Frame-Based Protocol* by using special
4680 It is possible to emit a *Unified Frame-Based Protocol* by using special
4676 syntax.
4681 syntax.
4677
4682
4678 A frame is composed as a type, flags, and payload. These can be parsed
4683 A frame is composed as a type, flags, and payload. These can be parsed
4679 from a string of the form:
4684 from a string of the form:
4680
4685
4681 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4686 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4682
4687
4683 ``request-id`` and ``stream-id`` are integers defining the request and
4688 ``request-id`` and ``stream-id`` are integers defining the request and
4684 stream identifiers.
4689 stream identifiers.
4685
4690
4686 ``type`` can be an integer value for the frame type or the string name
4691 ``type`` can be an integer value for the frame type or the string name
4687 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4692 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4688 ``command-name``.
4693 ``command-name``.
4689
4694
4690 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4695 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4691 components. Each component (and there can be just one) can be an integer
4696 components. Each component (and there can be just one) can be an integer
4692 or a flag name for stream flags or frame flags, respectively. Values are
4697 or a flag name for stream flags or frame flags, respectively. Values are
4693 resolved to integers and then bitwise OR'd together.
4698 resolved to integers and then bitwise OR'd together.
4694
4699
4695 ``payload`` represents the raw frame payload. If it begins with
4700 ``payload`` represents the raw frame payload. If it begins with
4696 ``cbor:``, the following string is evaluated as Python code and the
4701 ``cbor:``, the following string is evaluated as Python code and the
4697 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4702 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4698 as a Python byte string literal.
4703 as a Python byte string literal.
4699 """
4704 """
4700 opts = pycompat.byteskwargs(opts)
4705 opts = pycompat.byteskwargs(opts)
4701
4706
4702 if opts[b'localssh'] and not repo:
4707 if opts[b'localssh'] and not repo:
4703 raise error.Abort(_(b'--localssh requires a repository'))
4708 raise error.Abort(_(b'--localssh requires a repository'))
4704
4709
4705 if opts[b'peer'] and opts[b'peer'] not in (
4710 if opts[b'peer'] and opts[b'peer'] not in (
4706 b'raw',
4711 b'raw',
4707 b'ssh1',
4712 b'ssh1',
4708 ):
4713 ):
4709 raise error.Abort(
4714 raise error.Abort(
4710 _(b'invalid value for --peer'),
4715 _(b'invalid value for --peer'),
4711 hint=_(b'valid values are "raw" and "ssh1"'),
4716 hint=_(b'valid values are "raw" and "ssh1"'),
4712 )
4717 )
4713
4718
4714 if path and opts[b'localssh']:
4719 if path and opts[b'localssh']:
4715 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4720 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4716
4721
4717 if ui.interactive():
4722 if ui.interactive():
4718 ui.write(_(b'(waiting for commands on stdin)\n'))
4723 ui.write(_(b'(waiting for commands on stdin)\n'))
4719
4724
4720 blocks = list(_parsewirelangblocks(ui.fin))
4725 blocks = list(_parsewirelangblocks(ui.fin))
4721
4726
4722 proc = None
4727 proc = None
4723 stdin = None
4728 stdin = None
4724 stdout = None
4729 stdout = None
4725 stderr = None
4730 stderr = None
4726 opener = None
4731 opener = None
4727
4732
4728 if opts[b'localssh']:
4733 if opts[b'localssh']:
4729 # We start the SSH server in its own process so there is process
4734 # We start the SSH server in its own process so there is process
4730 # separation. This prevents a whole class of potential bugs around
4735 # separation. This prevents a whole class of potential bugs around
4731 # shared state from interfering with server operation.
4736 # shared state from interfering with server operation.
4732 args = procutil.hgcmd() + [
4737 args = procutil.hgcmd() + [
4733 b'-R',
4738 b'-R',
4734 repo.root,
4739 repo.root,
4735 b'debugserve',
4740 b'debugserve',
4736 b'--sshstdio',
4741 b'--sshstdio',
4737 ]
4742 ]
4738 proc = subprocess.Popen(
4743 proc = subprocess.Popen(
4739 pycompat.rapply(procutil.tonativestr, args),
4744 pycompat.rapply(procutil.tonativestr, args),
4740 stdin=subprocess.PIPE,
4745 stdin=subprocess.PIPE,
4741 stdout=subprocess.PIPE,
4746 stdout=subprocess.PIPE,
4742 stderr=subprocess.PIPE,
4747 stderr=subprocess.PIPE,
4743 bufsize=0,
4748 bufsize=0,
4744 )
4749 )
4745
4750
4746 stdin = proc.stdin
4751 stdin = proc.stdin
4747 stdout = proc.stdout
4752 stdout = proc.stdout
4748 stderr = proc.stderr
4753 stderr = proc.stderr
4749
4754
4750 # We turn the pipes into observers so we can log I/O.
4755 # We turn the pipes into observers so we can log I/O.
4751 if ui.verbose or opts[b'peer'] == b'raw':
4756 if ui.verbose or opts[b'peer'] == b'raw':
4752 stdin = util.makeloggingfileobject(
4757 stdin = util.makeloggingfileobject(
4753 ui, proc.stdin, b'i', logdata=True
4758 ui, proc.stdin, b'i', logdata=True
4754 )
4759 )
4755 stdout = util.makeloggingfileobject(
4760 stdout = util.makeloggingfileobject(
4756 ui, proc.stdout, b'o', logdata=True
4761 ui, proc.stdout, b'o', logdata=True
4757 )
4762 )
4758 stderr = util.makeloggingfileobject(
4763 stderr = util.makeloggingfileobject(
4759 ui, proc.stderr, b'e', logdata=True
4764 ui, proc.stderr, b'e', logdata=True
4760 )
4765 )
4761
4766
4762 # --localssh also implies the peer connection settings.
4767 # --localssh also implies the peer connection settings.
4763
4768
4764 url = b'ssh://localserver'
4769 url = b'ssh://localserver'
4765 autoreadstderr = not opts[b'noreadstderr']
4770 autoreadstderr = not opts[b'noreadstderr']
4766
4771
4767 if opts[b'peer'] == b'ssh1':
4772 if opts[b'peer'] == b'ssh1':
4768 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4773 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4769 peer = sshpeer.sshv1peer(
4774 peer = sshpeer.sshv1peer(
4770 ui,
4775 ui,
4771 url,
4776 url,
4772 proc,
4777 proc,
4773 stdin,
4778 stdin,
4774 stdout,
4779 stdout,
4775 stderr,
4780 stderr,
4776 None,
4781 None,
4777 autoreadstderr=autoreadstderr,
4782 autoreadstderr=autoreadstderr,
4778 )
4783 )
4779 elif opts[b'peer'] == b'raw':
4784 elif opts[b'peer'] == b'raw':
4780 ui.write(_(b'using raw connection to peer\n'))
4785 ui.write(_(b'using raw connection to peer\n'))
4781 peer = None
4786 peer = None
4782 else:
4787 else:
4783 ui.write(_(b'creating ssh peer from handshake results\n'))
4788 ui.write(_(b'creating ssh peer from handshake results\n'))
4784 peer = sshpeer.makepeer(
4789 peer = sshpeer.makepeer(
4785 ui,
4790 ui,
4786 url,
4791 url,
4787 proc,
4792 proc,
4788 stdin,
4793 stdin,
4789 stdout,
4794 stdout,
4790 stderr,
4795 stderr,
4791 autoreadstderr=autoreadstderr,
4796 autoreadstderr=autoreadstderr,
4792 )
4797 )
4793
4798
4794 elif path:
4799 elif path:
4795 # We bypass hg.peer() so we can proxy the sockets.
4800 # We bypass hg.peer() so we can proxy the sockets.
4796 # TODO consider not doing this because we skip
4801 # TODO consider not doing this because we skip
4797 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4802 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4798 u = urlutil.url(path)
4803 u = urlutil.url(path)
4799 if u.scheme != b'http':
4804 if u.scheme != b'http':
4800 raise error.Abort(_(b'only http:// paths are currently supported'))
4805 raise error.Abort(_(b'only http:// paths are currently supported'))
4801
4806
4802 url, authinfo = u.authinfo()
4807 url, authinfo = u.authinfo()
4803 openerargs = {
4808 openerargs = {
4804 'useragent': b'Mercurial debugwireproto',
4809 'useragent': b'Mercurial debugwireproto',
4805 }
4810 }
4806
4811
4807 # Turn pipes/sockets into observers so we can log I/O.
4812 # Turn pipes/sockets into observers so we can log I/O.
4808 if ui.verbose:
4813 if ui.verbose:
4809 openerargs.update(
4814 openerargs.update(
4810 {
4815 {
4811 'loggingfh': ui,
4816 'loggingfh': ui,
4812 'loggingname': b's',
4817 'loggingname': b's',
4813 'loggingopts': {
4818 'loggingopts': {
4814 'logdata': True,
4819 'logdata': True,
4815 'logdataapis': False,
4820 'logdataapis': False,
4816 },
4821 },
4817 }
4822 }
4818 )
4823 )
4819
4824
4820 if ui.debugflag:
4825 if ui.debugflag:
4821 openerargs['loggingopts']['logdataapis'] = True
4826 openerargs['loggingopts']['logdataapis'] = True
4822
4827
4823 # Don't send default headers when in raw mode. This allows us to
4828 # Don't send default headers when in raw mode. This allows us to
4824 # bypass most of the behavior of our URL handling code so we can
4829 # bypass most of the behavior of our URL handling code so we can
4825 # have near complete control over what's sent on the wire.
4830 # have near complete control over what's sent on the wire.
4826 if opts[b'peer'] == b'raw':
4831 if opts[b'peer'] == b'raw':
4827 openerargs['sendaccept'] = False
4832 openerargs['sendaccept'] = False
4828
4833
4829 opener = urlmod.opener(ui, authinfo, **openerargs)
4834 opener = urlmod.opener(ui, authinfo, **openerargs)
4830
4835
4831 if opts[b'peer'] == b'raw':
4836 if opts[b'peer'] == b'raw':
4832 ui.write(_(b'using raw connection to peer\n'))
4837 ui.write(_(b'using raw connection to peer\n'))
4833 peer = None
4838 peer = None
4834 elif opts[b'peer']:
4839 elif opts[b'peer']:
4835 raise error.Abort(
4840 raise error.Abort(
4836 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4841 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4837 )
4842 )
4838 else:
4843 else:
4839 peer = httppeer.makepeer(ui, path, opener=opener)
4844 peer = httppeer.makepeer(ui, path, opener=opener)
4840
4845
4841 # We /could/ populate stdin/stdout with sock.makefile()...
4846 # We /could/ populate stdin/stdout with sock.makefile()...
4842 else:
4847 else:
4843 raise error.Abort(_(b'unsupported connection configuration'))
4848 raise error.Abort(_(b'unsupported connection configuration'))
4844
4849
4845 batchedcommands = None
4850 batchedcommands = None
4846
4851
4847 # Now perform actions based on the parsed wire language instructions.
4852 # Now perform actions based on the parsed wire language instructions.
4848 for action, lines in blocks:
4853 for action, lines in blocks:
4849 if action in (b'raw', b'raw+'):
4854 if action in (b'raw', b'raw+'):
4850 if not stdin:
4855 if not stdin:
4851 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4856 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4852
4857
4853 # Concatenate the data together.
4858 # Concatenate the data together.
4854 data = b''.join(l.lstrip() for l in lines)
4859 data = b''.join(l.lstrip() for l in lines)
4855 data = stringutil.unescapestr(data)
4860 data = stringutil.unescapestr(data)
4856 stdin.write(data)
4861 stdin.write(data)
4857
4862
4858 if action == b'raw+':
4863 if action == b'raw+':
4859 stdin.flush()
4864 stdin.flush()
4860 elif action == b'flush':
4865 elif action == b'flush':
4861 if not stdin:
4866 if not stdin:
4862 raise error.Abort(_(b'cannot call flush on this peer'))
4867 raise error.Abort(_(b'cannot call flush on this peer'))
4863 stdin.flush()
4868 stdin.flush()
4864 elif action.startswith(b'command'):
4869 elif action.startswith(b'command'):
4865 if not peer:
4870 if not peer:
4866 raise error.Abort(
4871 raise error.Abort(
4867 _(
4872 _(
4868 b'cannot send commands unless peer instance '
4873 b'cannot send commands unless peer instance '
4869 b'is available'
4874 b'is available'
4870 )
4875 )
4871 )
4876 )
4872
4877
4873 command = action.split(b' ', 1)[1]
4878 command = action.split(b' ', 1)[1]
4874
4879
4875 args = {}
4880 args = {}
4876 for line in lines:
4881 for line in lines:
4877 # We need to allow empty values.
4882 # We need to allow empty values.
4878 fields = line.lstrip().split(b' ', 1)
4883 fields = line.lstrip().split(b' ', 1)
4879 if len(fields) == 1:
4884 if len(fields) == 1:
4880 key = fields[0]
4885 key = fields[0]
4881 value = b''
4886 value = b''
4882 else:
4887 else:
4883 key, value = fields
4888 key, value = fields
4884
4889
4885 if value.startswith(b'eval:'):
4890 if value.startswith(b'eval:'):
4886 value = stringutil.evalpythonliteral(value[5:])
4891 value = stringutil.evalpythonliteral(value[5:])
4887 else:
4892 else:
4888 value = stringutil.unescapestr(value)
4893 value = stringutil.unescapestr(value)
4889
4894
4890 args[key] = value
4895 args[key] = value
4891
4896
4892 if batchedcommands is not None:
4897 if batchedcommands is not None:
4893 batchedcommands.append((command, args))
4898 batchedcommands.append((command, args))
4894 continue
4899 continue
4895
4900
4896 ui.status(_(b'sending %s command\n') % command)
4901 ui.status(_(b'sending %s command\n') % command)
4897
4902
4898 if b'PUSHFILE' in args:
4903 if b'PUSHFILE' in args:
4899 with open(args[b'PUSHFILE'], 'rb') as fh:
4904 with open(args[b'PUSHFILE'], 'rb') as fh:
4900 del args[b'PUSHFILE']
4905 del args[b'PUSHFILE']
4901 res, output = peer._callpush(
4906 res, output = peer._callpush(
4902 command, fh, **pycompat.strkwargs(args)
4907 command, fh, **pycompat.strkwargs(args)
4903 )
4908 )
4904 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4909 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4905 ui.status(
4910 ui.status(
4906 _(b'remote output: %s\n') % stringutil.escapestr(output)
4911 _(b'remote output: %s\n') % stringutil.escapestr(output)
4907 )
4912 )
4908 else:
4913 else:
4909 with peer.commandexecutor() as e:
4914 with peer.commandexecutor() as e:
4910 res = e.callcommand(command, args).result()
4915 res = e.callcommand(command, args).result()
4911
4916
4912 ui.status(
4917 ui.status(
4913 _(b'response: %s\n')
4918 _(b'response: %s\n')
4914 % stringutil.pprint(res, bprefix=True, indent=2)
4919 % stringutil.pprint(res, bprefix=True, indent=2)
4915 )
4920 )
4916
4921
4917 elif action == b'batchbegin':
4922 elif action == b'batchbegin':
4918 if batchedcommands is not None:
4923 if batchedcommands is not None:
4919 raise error.Abort(_(b'nested batchbegin not allowed'))
4924 raise error.Abort(_(b'nested batchbegin not allowed'))
4920
4925
4921 batchedcommands = []
4926 batchedcommands = []
4922 elif action == b'batchsubmit':
4927 elif action == b'batchsubmit':
4923 # There is a batching API we could go through. But it would be
4928 # There is a batching API we could go through. But it would be
4924 # difficult to normalize requests into function calls. It is easier
4929 # difficult to normalize requests into function calls. It is easier
4925 # to bypass this layer and normalize to commands + args.
4930 # to bypass this layer and normalize to commands + args.
4926 ui.status(
4931 ui.status(
4927 _(b'sending batch with %d sub-commands\n')
4932 _(b'sending batch with %d sub-commands\n')
4928 % len(batchedcommands)
4933 % len(batchedcommands)
4929 )
4934 )
4930 assert peer is not None
4935 assert peer is not None
4931 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4936 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4932 ui.status(
4937 ui.status(
4933 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4938 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4934 )
4939 )
4935
4940
4936 batchedcommands = None
4941 batchedcommands = None
4937
4942
4938 elif action.startswith(b'httprequest '):
4943 elif action.startswith(b'httprequest '):
4939 if not opener:
4944 if not opener:
4940 raise error.Abort(
4945 raise error.Abort(
4941 _(b'cannot use httprequest without an HTTP peer')
4946 _(b'cannot use httprequest without an HTTP peer')
4942 )
4947 )
4943
4948
4944 request = action.split(b' ', 2)
4949 request = action.split(b' ', 2)
4945 if len(request) != 3:
4950 if len(request) != 3:
4946 raise error.Abort(
4951 raise error.Abort(
4947 _(
4952 _(
4948 b'invalid httprequest: expected format is '
4953 b'invalid httprequest: expected format is '
4949 b'"httprequest <method> <path>'
4954 b'"httprequest <method> <path>'
4950 )
4955 )
4951 )
4956 )
4952
4957
4953 method, httppath = request[1:]
4958 method, httppath = request[1:]
4954 headers = {}
4959 headers = {}
4955 body = None
4960 body = None
4956 frames = []
4961 frames = []
4957 for line in lines:
4962 for line in lines:
4958 line = line.lstrip()
4963 line = line.lstrip()
4959 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4964 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4960 if m:
4965 if m:
4961 # Headers need to use native strings.
4966 # Headers need to use native strings.
4962 key = pycompat.strurl(m.group(1))
4967 key = pycompat.strurl(m.group(1))
4963 value = pycompat.strurl(m.group(2))
4968 value = pycompat.strurl(m.group(2))
4964 headers[key] = value
4969 headers[key] = value
4965 continue
4970 continue
4966
4971
4967 if line.startswith(b'BODYFILE '):
4972 if line.startswith(b'BODYFILE '):
4968 with open(line.split(b' ', 1), b'rb') as fh:
4973 with open(line.split(b' ', 1), b'rb') as fh:
4969 body = fh.read()
4974 body = fh.read()
4970 elif line.startswith(b'frame '):
4975 elif line.startswith(b'frame '):
4971 frame = wireprotoframing.makeframefromhumanstring(
4976 frame = wireprotoframing.makeframefromhumanstring(
4972 line[len(b'frame ') :]
4977 line[len(b'frame ') :]
4973 )
4978 )
4974
4979
4975 frames.append(frame)
4980 frames.append(frame)
4976 else:
4981 else:
4977 raise error.Abort(
4982 raise error.Abort(
4978 _(b'unknown argument to httprequest: %s') % line
4983 _(b'unknown argument to httprequest: %s') % line
4979 )
4984 )
4980
4985
4981 url = path + httppath
4986 url = path + httppath
4982
4987
4983 if frames:
4988 if frames:
4984 body = b''.join(bytes(f) for f in frames)
4989 body = b''.join(bytes(f) for f in frames)
4985
4990
4986 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4991 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4987
4992
4988 # urllib.Request insists on using has_data() as a proxy for
4993 # urllib.Request insists on using has_data() as a proxy for
4989 # determining the request method. Override that to use our
4994 # determining the request method. Override that to use our
4990 # explicitly requested method.
4995 # explicitly requested method.
4991 req.get_method = lambda: pycompat.sysstr(method)
4996 req.get_method = lambda: pycompat.sysstr(method)
4992
4997
4993 try:
4998 try:
4994 res = opener.open(req)
4999 res = opener.open(req)
4995 body = res.read()
5000 body = res.read()
4996 except util.urlerr.urlerror as e:
5001 except util.urlerr.urlerror as e:
4997 # read() method must be called, but only exists in Python 2
5002 # read() method must be called, but only exists in Python 2
4998 getattr(e, 'read', lambda: None)()
5003 getattr(e, 'read', lambda: None)()
4999 continue
5004 continue
5000
5005
5001 ct = res.headers.get('Content-Type')
5006 ct = res.headers.get('Content-Type')
5002 if ct == 'application/mercurial-cbor':
5007 if ct == 'application/mercurial-cbor':
5003 ui.write(
5008 ui.write(
5004 _(b'cbor> %s\n')
5009 _(b'cbor> %s\n')
5005 % stringutil.pprint(
5010 % stringutil.pprint(
5006 cborutil.decodeall(body), bprefix=True, indent=2
5011 cborutil.decodeall(body), bprefix=True, indent=2
5007 )
5012 )
5008 )
5013 )
5009
5014
5010 elif action == b'close':
5015 elif action == b'close':
5011 assert peer is not None
5016 assert peer is not None
5012 peer.close()
5017 peer.close()
5013 elif action == b'readavailable':
5018 elif action == b'readavailable':
5014 if not stdout or not stderr:
5019 if not stdout or not stderr:
5015 raise error.Abort(
5020 raise error.Abort(
5016 _(b'readavailable not available on this peer')
5021 _(b'readavailable not available on this peer')
5017 )
5022 )
5018
5023
5019 stdin.close()
5024 stdin.close()
5020 stdout.read()
5025 stdout.read()
5021 stderr.read()
5026 stderr.read()
5022
5027
5023 elif action == b'readline':
5028 elif action == b'readline':
5024 if not stdout:
5029 if not stdout:
5025 raise error.Abort(_(b'readline not available on this peer'))
5030 raise error.Abort(_(b'readline not available on this peer'))
5026 stdout.readline()
5031 stdout.readline()
5027 elif action == b'ereadline':
5032 elif action == b'ereadline':
5028 if not stderr:
5033 if not stderr:
5029 raise error.Abort(_(b'ereadline not available on this peer'))
5034 raise error.Abort(_(b'ereadline not available on this peer'))
5030 stderr.readline()
5035 stderr.readline()
5031 elif action.startswith(b'read '):
5036 elif action.startswith(b'read '):
5032 count = int(action.split(b' ', 1)[1])
5037 count = int(action.split(b' ', 1)[1])
5033 if not stdout:
5038 if not stdout:
5034 raise error.Abort(_(b'read not available on this peer'))
5039 raise error.Abort(_(b'read not available on this peer'))
5035 stdout.read(count)
5040 stdout.read(count)
5036 elif action.startswith(b'eread '):
5041 elif action.startswith(b'eread '):
5037 count = int(action.split(b' ', 1)[1])
5042 count = int(action.split(b' ', 1)[1])
5038 if not stderr:
5043 if not stderr:
5039 raise error.Abort(_(b'eread not available on this peer'))
5044 raise error.Abort(_(b'eread not available on this peer'))
5040 stderr.read(count)
5045 stderr.read(count)
5041 else:
5046 else:
5042 raise error.Abort(_(b'unknown action: %s') % action)
5047 raise error.Abort(_(b'unknown action: %s') % action)
5043
5048
5044 if batchedcommands is not None:
5049 if batchedcommands is not None:
5045 raise error.Abort(_(b'unclosed "batchbegin" request'))
5050 raise error.Abort(_(b'unclosed "batchbegin" request'))
5046
5051
5047 if peer:
5052 if peer:
5048 peer.close()
5053 peer.close()
5049
5054
5050 if proc:
5055 if proc:
5051 proc.kill()
5056 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now