##// END OF EJS Templates
debugcommands: fix some plural-agreements I noticed...
Augie Fackler -
r47855:bd2ed103 stable
parent child Browse files
Show More
@@ -1,4827 +1,4827 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullid,
33 nullid,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 revlog,
74 revlog,
75 revset,
75 revset,
76 revsetlang,
76 revsetlang,
77 scmutil,
77 scmutil,
78 setdiscovery,
78 setdiscovery,
79 simplemerge,
79 simplemerge,
80 sshpeer,
80 sshpeer,
81 sslutil,
81 sslutil,
82 streamclone,
82 streamclone,
83 strip,
83 strip,
84 tags as tagsmod,
84 tags as tagsmod,
85 templater,
85 templater,
86 treediscovery,
86 treediscovery,
87 upgrade,
87 upgrade,
88 url as urlmod,
88 url as urlmod,
89 util,
89 util,
90 vfs as vfsmod,
90 vfs as vfsmod,
91 wireprotoframing,
91 wireprotoframing,
92 wireprotoserver,
92 wireprotoserver,
93 wireprotov2peer,
93 wireprotov2peer,
94 )
94 )
95 from .utils import (
95 from .utils import (
96 cborutil,
96 cborutil,
97 compression,
97 compression,
98 dateutil,
98 dateutil,
99 procutil,
99 procutil,
100 stringutil,
100 stringutil,
101 urlutil,
101 urlutil,
102 )
102 )
103
103
104 from .revlogutils import (
104 from .revlogutils import (
105 deltas as deltautil,
105 deltas as deltautil,
106 nodemap,
106 nodemap,
107 sidedata,
107 sidedata,
108 )
108 )
109
109
110 release = lockmod.release
110 release = lockmod.release
111
111
112 table = {}
112 table = {}
113 table.update(strip.command._table)
113 table.update(strip.command._table)
114 command = registrar.command(table)
114 command = registrar.command(table)
115
115
116
116
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it directly, without
        # going through a repository.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
136
136
137
137
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Fix: the filename was a str literal; every vfs path in this module
    # is bytes, and mixing str/bytes paths is inconsistent (and breaks
    # byte-oriented path handling on Python 3).
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
153
153
154
154
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path, parse its header, then replay the stream
    # onto the local repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
161
161
162
162
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # the DAG must be built into a brand-new repo; refuse otherwise
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, used only for
    # sizing the progress bar and the initial mergeable-file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # at: id of the last node committed (-1 before the first commit)
        at = -1
        atbranch = b'default'
        # nodeids[i] is the node hash of DAG id i, for resolving backrefs
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge rev: 3-way merge the file content of both
                        # parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's "own" line so every rev changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # merges must carry over the other parent's nf* files
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content from the dict
                    # built above, or None for files absent in this rev
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag element: remember it, written out at the end
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
338
338
339
339
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of the changegroup unbundler 'gen'

    With 'all' set, prints every delta chunk of the changelog, the
    manifest and each filelog; otherwise only the changelog node ids.
    'indent' prefixes each output line (used when nested in a bundle2
    dump).  Consumes 'gen'.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one group header, then one line per delta in the group
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # groups must be consumed in stream order: changelog, manifest,
        # then one group per filelog until the empty {} sentinel header
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
379
379
380
380
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # markers encoded by a newer client: report instead of aborting
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # render markers through the same formatter/showmarker path as
        # the debugobsolete command, one item per marker
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
403
403
404
404
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'

    (Docstring fixed: it was copy-pasted from _debugobsmarkers and
    wrongly talked about "version and markers".)
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
413
413
414
414
def _quasirepr(thing):
    """Return a deterministic, repr-like bytes rendering of 'thing'.

    Mapping types are rendered with keys in sorted order so the output
    does not depend on insertion order.
    """
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, mapping_types):
        pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
421
421
422
422
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: --part-type restricts output to the named parts
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known payload types get a nested, indented dump (unless --quiet)
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
445
445
446
446
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # --spec only reports the bundlespec; skip parsing the payload
            ui.write(b'%s\n' % exchange.getbundlespec(ui, fh))
            return

        bundle = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(bundle, bundle2.unbundle20):
            # bundle2 containers get the part-aware dumper
            return _debugbundle2(ui, bundle, all=all, **opts)
        _debugchangegroup(ui, bundle, all=all, **opts)
469
469
470
470
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # plain wire-protocol capabilities first
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b'  %s\n' % cap)
        # then bundle2 capabilities, if the peer advertises any
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        # always release the peer connection, even on error
        peer.close()
490
490
491
491
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute from the changesets instead of trusting stored sidedata
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the pre-computed files block from changelog sidedata, if any
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # classify each touched file; "touched" is the catch-all
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # copy information: which parent the file was copied from,
            # and the source path in that parent (empty if not a copy)
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
541
541
542
542
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errcount = [0]

    def complain(msg):
        # report one inconsistency and remember that we saw it
        ui.warn(msg)
        errcount[0] += 1

    # cross-check every dirstate entry against the parent manifests
    for fname in repo.dirstate:
        state = repo.dirstate[fname]
        if state in b"nr" and fname not in m1:
            complain(
                _(b"%s in state %s, but not in manifest1\n") % (fname, state)
            )
        if state in b"a" and fname in m1:
            complain(
                _(b"%s in state %s, but also in manifest1\n") % (fname, state)
            )
        if state in b"m" and fname not in m1 and fname not in m2:
            complain(
                _(b"%s in state %s, but not in either manifest\n")
                % (fname, state)
            )
    # and the reverse direction: every manifest file must be tracked
    for fname in m1:
        state = repo.dirstate[fname]
        if state not in b"nrm":
            complain(
                _(b"%s in manifest1, but listed as state %s") % (fname, state)
            )
    if errcount[0]:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
571
571
572
572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # dispatch to either the style listing or the raw color listing
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
585
585
586
586
def _debugdisplaycolor(ui):
    """print every color/effect name known to the active color mode,
    each rendered in its own color"""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui):
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.') :]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.') :]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
603
603
604
604
def _debugdisplaystyle(ui):
    """Print each configured style label and the effects it expands to."""
    # Plural agreement: multiple styles are listed below, so the header
    # should read "styles", not "style".
    ui.write(_(b'available styles:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists line up in a column after the longest label.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
618
618
619
619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqstr = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqstr)
641
641
642
642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Operate on an arbitrary revlog index file rather than the repo.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield a 'n'(ode) event per revision, plus a 'l'(abel) event
            # for each revision explicitly listed on the command line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map revision number -> list of tag names pointing at it.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an 'a'(nnotation) event whenever the branch of
                    # the current revision differs from the previous one.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # dagparser turns the event stream into the concise text form.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
712
712
713
713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    storage_selected = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_selected:
        # With -c/-m/--dir the sole positional argument is the revision.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
729
729
730
730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With -e/--extended, also try the less common date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
749
749
750
750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
    (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
    base of delta chain to end of this revision; a measurement
    of how much extra data we need to read/seek across to read
    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
    (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Return (compsize, uncompsize, deltatype, chain, chainsize) for rev.
        # e is the revlog index tuple; e[1]=compressed size, e[2]=raw size,
        # e[3]=delta base rev, e[5]/e[6]=parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # Classify the delta base relative to the revision's parents.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta a revision is either a full snapshot
            # (base) or a delta against the previous revision.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Number chains by their unique base revision, in order of appearance.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Bytes spanned on disk from the chain base through this revision.
        lineardist = revstart + comp - basestart
        # Bytes inside that span that do NOT belong to this delta chain.
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: this revision is its own base.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Sum the on-disk blocks a sparse read would fetch for the chain.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
931
931
932
932
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is deprecated; when given explicitly it forces mtimes off.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True

    if opts.get('datesort'):
        # Sort by mtime first, then by filename.
        def keyfunc(item):
            return (item[1][3], item[0])

    else:
        keyfunc = None  # plain filename sort

    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            stamp = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(stamp)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
976
976
977
977
978 @command(
978 @command(
979 b'debugdiscovery',
979 b'debugdiscovery',
980 [
980 [
981 (b'', b'old', None, _(b'use old-style discovery')),
981 (b'', b'old', None, _(b'use old-style discovery')),
982 (
982 (
983 b'',
983 b'',
984 b'nonheads',
984 b'nonheads',
985 None,
985 None,
986 _(b'use old-style discovery with non-heads included'),
986 _(b'use old-style discovery with non-heads included'),
987 ),
987 ),
988 (b'', b'rev', [], b'restrict discovery to this set of revs'),
988 (b'', b'rev', [], b'restrict discovery to this set of revs'),
989 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
989 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
990 (
990 (
991 b'',
991 b'',
992 b'local-as-revs',
992 b'local-as-revs',
993 b"",
993 b"",
994 b'treat local has having these revisions only',
994 b'treat local has having these revisions only',
995 ),
995 ),
996 (
996 (
997 b'',
997 b'',
998 b'remote-as-revs',
998 b'remote-as-revs',
999 b"",
999 b"",
1000 b'use local as remote, with only these these revisions',
1000 b'use local as remote, with only these these revisions',
1001 ),
1001 ),
1002 ]
1002 ]
1003 + cmdutil.remoteopts
1003 + cmdutil.remoteopts
1004 + cmdutil.formatteropts,
1004 + cmdutil.formatteropts,
1005 _(b'[--rev REV] [OTHER]'),
1005 _(b'[--rev REV] [OTHER]'),
1006 )
1006 )
1007 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1007 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1008 """runs the changeset discovery protocol in isolation
1008 """runs the changeset discovery protocol in isolation
1009
1009
1010 The local peer can be "replaced" by a subset of the local repository by
1010 The local peer can be "replaced" by a subset of the local repository by
1011 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1011 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1012 be "replaced" by a subset of the local repository using the
1012 be "replaced" by a subset of the local repository using the
1013 `--local-as-revs` flag. This is useful to efficiently debug pathological
1013 `--local-as-revs` flag. This is useful to efficiently debug pathological
1014 discovery situation.
1014 discovery situation.
1015
1015
1016 The following developer oriented config are relevant for people playing with this command:
1016 The following developer oriented config are relevant for people playing with this command:
1017
1017
1018 * devel.discovery.exchange-heads=True
1018 * devel.discovery.exchange-heads=True
1019
1019
1020 If False, the discovery will not start with
1020 If False, the discovery will not start with
1021 remote head fetching and local head querying.
1021 remote head fetching and local head querying.
1022
1022
1023 * devel.discovery.grow-sample=True
1023 * devel.discovery.grow-sample=True
1024
1024
1025 If False, the sample size used in set discovery will not be increased
1025 If False, the sample size used in set discovery will not be increased
1026 through the process
1026 through the process
1027
1027
1028 * devel.discovery.grow-sample.dynamic=True
1028 * devel.discovery.grow-sample.dynamic=True
1029
1029
1030 When discovery.grow-sample.dynamic is True, the default, the sample size is
1030 When discovery.grow-sample.dynamic is True, the default, the sample size is
1031 adapted to the shape of the undecided set (it is set to the max of:
1031 adapted to the shape of the undecided set (it is set to the max of:
1032 <target-size>, len(roots(undecided)), len(heads(undecided)
1032 <target-size>, len(roots(undecided)), len(heads(undecided)
1033
1033
1034 * devel.discovery.grow-sample.rate=1.05
1034 * devel.discovery.grow-sample.rate=1.05
1035
1035
1036 the rate at which the sample grow
1036 the rate at which the sample grow
1037
1037
1038 * devel.discovery.randomize=True
1038 * devel.discovery.randomize=True
1039
1039
1040 If andom sampling during discovery are deterministic. It is meant for
1040 If andom sampling during discovery are deterministic. It is meant for
1041 integration tests.
1041 integration tests.
1042
1042
1043 * devel.discovery.sample-size=200
1043 * devel.discovery.sample-size=200
1044
1044
1045 Control the initial size of the discovery sample
1045 Control the initial size of the discovery sample
1046
1046
1047 * devel.discovery.sample-size.initial=100
1047 * devel.discovery.sample-size.initial=100
1048
1048
1049 Control the initial size of the discovery for initial change
1049 Control the initial size of the discovery for initial change
1050 """
1050 """
1051 opts = pycompat.byteskwargs(opts)
1051 opts = pycompat.byteskwargs(opts)
1052 unfi = repo.unfiltered()
1052 unfi = repo.unfiltered()
1053
1053
1054 # setup potential extra filtering
1054 # setup potential extra filtering
1055 local_revs = opts[b"local_as_revs"]
1055 local_revs = opts[b"local_as_revs"]
1056 remote_revs = opts[b"remote_as_revs"]
1056 remote_revs = opts[b"remote_as_revs"]
1057
1057
1058 # make sure tests are repeatable
1058 # make sure tests are repeatable
1059 random.seed(int(opts[b'seed']))
1059 random.seed(int(opts[b'seed']))
1060
1060
1061 if not remote_revs:
1061 if not remote_revs:
1062
1062
1063 remoteurl, branches = urlutil.get_unique_pull_path(
1063 remoteurl, branches = urlutil.get_unique_pull_path(
1064 b'debugdiscovery', repo, ui, remoteurl
1064 b'debugdiscovery', repo, ui, remoteurl
1065 )
1065 )
1066 remote = hg.peer(repo, opts, remoteurl)
1066 remote = hg.peer(repo, opts, remoteurl)
1067 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1067 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1068 else:
1068 else:
1069 branches = (None, [])
1069 branches = (None, [])
1070 remote_filtered_revs = scmutil.revrange(
1070 remote_filtered_revs = scmutil.revrange(
1071 unfi, [b"not (::(%s))" % remote_revs]
1071 unfi, [b"not (::(%s))" % remote_revs]
1072 )
1072 )
1073 remote_filtered_revs = frozenset(remote_filtered_revs)
1073 remote_filtered_revs = frozenset(remote_filtered_revs)
1074
1074
1075 def remote_func(x):
1075 def remote_func(x):
1076 return remote_filtered_revs
1076 return remote_filtered_revs
1077
1077
1078 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1078 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1079
1079
1080 remote = repo.peer()
1080 remote = repo.peer()
1081 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1081 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1082
1082
1083 if local_revs:
1083 if local_revs:
1084 local_filtered_revs = scmutil.revrange(
1084 local_filtered_revs = scmutil.revrange(
1085 unfi, [b"not (::(%s))" % local_revs]
1085 unfi, [b"not (::(%s))" % local_revs]
1086 )
1086 )
1087 local_filtered_revs = frozenset(local_filtered_revs)
1087 local_filtered_revs = frozenset(local_filtered_revs)
1088
1088
1089 def local_func(x):
1089 def local_func(x):
1090 return local_filtered_revs
1090 return local_filtered_revs
1091
1091
1092 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1092 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1093 repo = repo.filtered(b'debug-discovery-local-filter')
1093 repo = repo.filtered(b'debug-discovery-local-filter')
1094
1094
1095 data = {}
1095 data = {}
1096 if opts.get(b'old'):
1096 if opts.get(b'old'):
1097
1097
1098 def doit(pushedrevs, remoteheads, remote=remote):
1098 def doit(pushedrevs, remoteheads, remote=remote):
1099 if not util.safehasattr(remote, b'branches'):
1099 if not util.safehasattr(remote, b'branches'):
1100 # enable in-client legacy support
1100 # enable in-client legacy support
1101 remote = localrepo.locallegacypeer(remote.local())
1101 remote = localrepo.locallegacypeer(remote.local())
1102 common, _in, hds = treediscovery.findcommonincoming(
1102 common, _in, hds = treediscovery.findcommonincoming(
1103 repo, remote, force=True, audit=data
1103 repo, remote, force=True, audit=data
1104 )
1104 )
1105 common = set(common)
1105 common = set(common)
1106 if not opts.get(b'nonheads'):
1106 if not opts.get(b'nonheads'):
1107 ui.writenoi18n(
1107 ui.writenoi18n(
1108 b"unpruned common: %s\n"
1108 b"unpruned common: %s\n"
1109 % b" ".join(sorted(short(n) for n in common))
1109 % b" ".join(sorted(short(n) for n in common))
1110 )
1110 )
1111
1111
1112 clnode = repo.changelog.node
1112 clnode = repo.changelog.node
1113 common = repo.revs(b'heads(::%ln)', common)
1113 common = repo.revs(b'heads(::%ln)', common)
1114 common = {clnode(r) for r in common}
1114 common = {clnode(r) for r in common}
1115 return common, hds
1115 return common, hds
1116
1116
1117 else:
1117 else:
1118
1118
1119 def doit(pushedrevs, remoteheads, remote=remote):
1119 def doit(pushedrevs, remoteheads, remote=remote):
1120 nodes = None
1120 nodes = None
1121 if pushedrevs:
1121 if pushedrevs:
1122 revs = scmutil.revrange(repo, pushedrevs)
1122 revs = scmutil.revrange(repo, pushedrevs)
1123 nodes = [repo[r].node() for r in revs]
1123 nodes = [repo[r].node() for r in revs]
1124 common, any, hds = setdiscovery.findcommonheads(
1124 common, any, hds = setdiscovery.findcommonheads(
1125 ui, repo, remote, ancestorsof=nodes, audit=data
1125 ui, repo, remote, ancestorsof=nodes, audit=data
1126 )
1126 )
1127 return common, hds
1127 return common, hds
1128
1128
1129 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1129 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1130 localrevs = opts[b'rev']
1130 localrevs = opts[b'rev']
1131
1131
1132 fm = ui.formatter(b'debugdiscovery', opts)
1132 fm = ui.formatter(b'debugdiscovery', opts)
1133 if fm.strict_format:
1133 if fm.strict_format:
1134
1134
1135 @contextlib.contextmanager
1135 @contextlib.contextmanager
1136 def may_capture_output():
1136 def may_capture_output():
1137 ui.pushbuffer()
1137 ui.pushbuffer()
1138 yield
1138 yield
1139 data[b'output'] = ui.popbuffer()
1139 data[b'output'] = ui.popbuffer()
1140
1140
1141 else:
1141 else:
1142 may_capture_output = util.nullcontextmanager
1142 may_capture_output = util.nullcontextmanager
1143 with may_capture_output():
1143 with may_capture_output():
1144 with util.timedcm('debug-discovery') as t:
1144 with util.timedcm('debug-discovery') as t:
1145 common, hds = doit(localrevs, remoterevs)
1145 common, hds = doit(localrevs, remoterevs)
1146
1146
1147 # compute all statistics
1147 # compute all statistics
1148 heads_common = set(common)
1148 heads_common = set(common)
1149 heads_remote = set(hds)
1149 heads_remote = set(hds)
1150 heads_local = set(repo.heads())
1150 heads_local = set(repo.heads())
1151 # note: they cannot be a local or remote head that is in common and not
1151 # note: they cannot be a local or remote head that is in common and not
1152 # itself a head of common.
1152 # itself a head of common.
1153 heads_common_local = heads_common & heads_local
1153 heads_common_local = heads_common & heads_local
1154 heads_common_remote = heads_common & heads_remote
1154 heads_common_remote = heads_common & heads_remote
1155 heads_common_both = heads_common & heads_remote & heads_local
1155 heads_common_both = heads_common & heads_remote & heads_local
1156
1156
1157 all = repo.revs(b'all()')
1157 all = repo.revs(b'all()')
1158 common = repo.revs(b'::%ln', common)
1158 common = repo.revs(b'::%ln', common)
1159 roots_common = repo.revs(b'roots(::%ld)', common)
1159 roots_common = repo.revs(b'roots(::%ld)', common)
1160 missing = repo.revs(b'not ::%ld', common)
1160 missing = repo.revs(b'not ::%ld', common)
1161 heads_missing = repo.revs(b'heads(%ld)', missing)
1161 heads_missing = repo.revs(b'heads(%ld)', missing)
1162 roots_missing = repo.revs(b'roots(%ld)', missing)
1162 roots_missing = repo.revs(b'roots(%ld)', missing)
1163 assert len(common) + len(missing) == len(all)
1163 assert len(common) + len(missing) == len(all)
1164
1164
1165 initial_undecided = repo.revs(
1165 initial_undecided = repo.revs(
1166 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1166 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1167 )
1167 )
1168 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1168 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1169 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1169 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1170 common_initial_undecided = initial_undecided & common
1170 common_initial_undecided = initial_undecided & common
1171 missing_initial_undecided = initial_undecided & missing
1171 missing_initial_undecided = initial_undecided & missing
1172
1172
1173 data[b'elapsed'] = t.elapsed
1173 data[b'elapsed'] = t.elapsed
1174 data[b'nb-common-heads'] = len(heads_common)
1174 data[b'nb-common-heads'] = len(heads_common)
1175 data[b'nb-common-heads-local'] = len(heads_common_local)
1175 data[b'nb-common-heads-local'] = len(heads_common_local)
1176 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1176 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1177 data[b'nb-common-heads-both'] = len(heads_common_both)
1177 data[b'nb-common-heads-both'] = len(heads_common_both)
1178 data[b'nb-common-roots'] = len(roots_common)
1178 data[b'nb-common-roots'] = len(roots_common)
1179 data[b'nb-head-local'] = len(heads_local)
1179 data[b'nb-head-local'] = len(heads_local)
1180 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1180 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1181 data[b'nb-head-remote'] = len(heads_remote)
1181 data[b'nb-head-remote'] = len(heads_remote)
1182 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1182 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1183 heads_common_remote
1183 heads_common_remote
1184 )
1184 )
1185 data[b'nb-revs'] = len(all)
1185 data[b'nb-revs'] = len(all)
1186 data[b'nb-revs-common'] = len(common)
1186 data[b'nb-revs-common'] = len(common)
1187 data[b'nb-revs-missing'] = len(missing)
1187 data[b'nb-revs-missing'] = len(missing)
1188 data[b'nb-missing-heads'] = len(heads_missing)
1188 data[b'nb-missing-heads'] = len(heads_missing)
1189 data[b'nb-missing-roots'] = len(roots_missing)
1189 data[b'nb-missing-roots'] = len(roots_missing)
1190 data[b'nb-ini_und'] = len(initial_undecided)
1190 data[b'nb-ini_und'] = len(initial_undecided)
1191 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1191 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1192 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1192 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1193 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1193 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1194 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1194 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1195
1195
1196 fm.startitem()
1196 fm.startitem()
1197 fm.data(**pycompat.strkwargs(data))
1197 fm.data(**pycompat.strkwargs(data))
1198 # display discovery summary
1198 # display discovery summary
1199 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1199 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1200 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1200 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1201 fm.plain(b"heads summary:\n")
1201 fm.plain(b"heads summary:\n")
1202 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1202 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1203 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1203 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1204 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1204 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1205 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1205 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1206 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1206 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1207 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1207 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1208 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1208 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1209 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1209 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1210 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1210 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1211 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1211 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1212 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1212 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1213 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1213 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1214 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1214 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1215 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1215 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1216 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1216 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1217 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1217 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1218 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1218 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1219 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1219 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1220 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1220 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1221 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1221 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1222 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1222 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1223 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1223 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1224
1224
1225 if ui.verbose:
1225 if ui.verbose:
1226 fm.plain(
1226 fm.plain(
1227 b"common heads: %s\n"
1227 b"common heads: %s\n"
1228 % b" ".join(sorted(short(n) for n in heads_common))
1228 % b" ".join(sorted(short(n) for n in heads_common))
1229 )
1229 )
1230 fm.end()
1230 fm.end()
1231
1231
1232
1232
1233 _chunksize = 4 << 10
1233 _chunksize = 4 << 10
1234
1234
1235
1235
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through Mercurial's own URL-opening machinery
    (so proxy/auth configuration applies) and streamed in _chunksize pieces
    either to the ui or, with --output, to the named file.
    """
    fh = urlmod.open(ui, url, output)

    # Default destination is the ui itself; only wrap a real file when the
    # caller asked for one, so we know whether we own the handle below.
    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # Stream in fixed-size chunks to bound memory use on large resources.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Close only the file we opened ourselves; never close the ui.
        if output:
            dest.close()
1258
1258
1259
1259
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions

    For each enabled extension this prints its name and, depending on
    verbosity, its source location, whether it is bundled with Mercurial,
    the versions it was tested with, and its bug-reporting link.
    '''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # In an oxidized (PyOxidizer) build, modules live inside the
            # executable itself rather than in separate files.
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1321
1321
1322
1322
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    The expression is pushed through the parse/analyze/optimize pipeline
    (optionally printing the tree after each requested stage), then matched
    against the files of the selected revision(s); matching names are
    printed one per line.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # The three stages the expression tree passes through, in order.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # With an explicit --show-stage, label each dump; the legacy
            # --verbose path prints the parsed tree without a header.
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect the candidate file names to run the matcher over.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1418
1418
1419
1419
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: the longest variant name, but never narrower than the
    # "format-variant" header itself.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Pad each variant name so the value columns line up.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # Strings pass through; booleans render as yes/no in plain mode.
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Label rows so color output can highlight mismatches between the
        # repository's on-disk format, the config, and Mercurial's default.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1490
1490
1491
1491
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem

    Probes the filesystem containing ``path`` (default: the current
    directory) and reports mount point, fs type, and support for exec
    bits, symlinks, hardlinks, and case sensitivity.
    """
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    casesensitive = b'(unknown)'
    try:
        # Case sensitivity is probed with a scratch file; if we cannot
        # create one (e.g. read-only path) we report "(unknown)".
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1514
1514
1515
1515
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing compression name onto the on-disk bundle header.
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1562
1562
1563
1563
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # The file itself is not matched; see whether one of its
                    # parent directories is ignored (which ignores it too).
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1612
1612
1613
1613
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive

    Prints one line per revision with its rev number, linkrev, node id,
    and both parent node ids. With --debug, full-length hashes are shown.
    """
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # Derive the id column width from the first node (all ids in a given
    # store render at the same length); fall back to 12 for an empty store.
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1653
1653
1654
1654
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    # emit one "parent -> child" edge per parent link; the null second
    # parent is omitted so merges show two incoming edges and others one
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1673
1673
1674
1674
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the index to be loaded/parsed before inspecting it
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    # Attribute names must be native strings: getattr() raises TypeError for
    # a bytes name on Python 3, so probing with b'stats' could never succeed.
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    # stats() is only provided by the C/Rust index implementations
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1684
1684
1685
1685
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # running tally of detected problems; also the command's return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # PyOxidizer builds embed the stdlib in the executable itself
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    # everything after the first '+' is local build metadata
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # verify the compiled extensions actually import
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    # p was cleared above when the template dir is missing or broken
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may include arguments; test only the binary
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    # a missing 'vi' default is only a warning; a missing configured
    # editor is a real problem
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own checks (and problem counts)
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1984
1984
1985
1985
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    byteopts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, byteopts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # query all ids in one round trip, then render one digit per answer
    answers = peer.known([bin(s) for s in ids])
    digits = [b"1" if known else b"0" for known in answers]
    ui.write(b"%s\n" % b"".join(digits))
1999
1999
2000
2000
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias: forward everything to debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2005
2005
2006
2006
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # force-free mode: unconditionally remove the lock files and stop
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # set mode: acquire the requested lock(s) non-blockingly, hold them
    # until the user answers the prompt, then release in the finally block
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # report mode (default): show the state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody held it; drop it again
            l.release()
        else:
            # somebody else holds it: describe the holder from the lock file
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # lock contents are "host:pid"
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
        # either we could acquire it or the lock file vanished: it is free
        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2118
2118
2119
2119
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache only exists on revlog-based manifest storage
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # default: dump the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2193
2193
2194
2194
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # In verbose mode, report which on-disk record format (v1 vs v2)
        # the merge state reader would pick before printing the state.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template: one line per merge commit, a section per file
        # (with either merge-record or path-conflict details), then any
        # extras that belong to files no longer in the merge state.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits" section: the two parents of the merge with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: one item per tracked merge record.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # Regular (content) merge record: local/ancestor/other info.
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path-conflict (rename) record.
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level "extras": extras for files that are not in the merge state
    # anymore (those still present were emitted in the per-file loop above).
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2302
2302
2303
2303
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # The 'branches' namespace is skipped here because historically only
    # *open* branches were listed; they are added from the branchmap below.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # No arguments means "complete the empty prefix", i.e. list everything.
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2326
2326
2327
2327
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            # fix: help text previously read "the data on disk data"
            _(b'check that the data on disk are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the (unfiltered) changelog index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # index implementations (e.g. compiled ones) may produce the
            # binary data directly
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Emit the raw persisted nodemap bytes, if any exist on disk.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the persisted data against the live index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Print the docket (metadata header) of the persisted nodemap.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # NOTE(review): assumes data_length > 0 for any persisted
            # nodemap -- confirm against the docket writer.
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2389
2389
2390
2390
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hexadecimal node id; the node need not exist in the
        # local repository.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # Deletion mode: remove the markers at the given integer indices.
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record one marker obsoleting `precursor` in favor
        # of `successors`, inside a lock + transaction.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent info can only be recorded for changesets we
                    # actually know about.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # With both --rev and --index we must walk *all* markers so the
            # printed indices refer to positions in the full marker list.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2540
2540
2541
2541
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # Resolve the requested revision (working directory when no --rev).
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dst in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2554
2554
2555
2555
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # fix: this function was previously also named `debugp1copies`, which
    # shadowed the real module-level `debugp1copies` defined just above.
    # The registered command name (b'debugp2copies') is unchanged.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2568
2568
2569
2569
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for `path`, keeping
        # only dirstate entries whose status byte is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # A path outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative, and /-separated like dirstate keys.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator and
                # offer the directory prefix instead.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the accepted status-byte set from the filter flags;
    # -n maps to 'n' (normal) and 'm' (merged), -a to 'a', -r to 'r'.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # No filter flags given -> accept every state ('nmar').
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2638
2638
2639
2639
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    oldctx = scmutil.revsingle(repo, rev1)
    newctx = scmutil.revsingle(repo, rev2)
    # Restrict the copy trace to the requested file patterns.
    matcher = scmutil.match(oldctx, pats, opts)
    copymap = copies.pathcopies(oldctx, newctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2653
2653
2654
2654
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging; the output still only shows up
    # when --debug is in effect.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    def boolname(flag):
        return _(b'yes') if flag else _(b'no')

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % boolname(islocal))
        ui.write(_(b'pushable: %s\n') % boolname(pushable))
    finally:
        # The connection must be released even when a query fails.
        peer.close()
2678
2678
2679
2679
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool maps onto the ui.forcemerge override, same as real merges.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # In verbose mode, echo the inputs that can short-circuit tool
        # selection (items 1, 2 and 4 of the order described above).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    # Suppress _picktool chatter unless --debug was given;
                    # the buffer is always popped in the finally block.
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2767
2767
2768
2768
2769 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2769 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2770 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2770 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2771 """access the pushkey key/value protocol
2771 """access the pushkey key/value protocol
2772
2772
2773 With two args, list the keys in the given namespace.
2773 With two args, list the keys in the given namespace.
2774
2774
2775 With five args, set a key to new if it currently is set to old.
2775 With five args, set a key to new if it currently is set to old.
2776 Reports success or failure.
2776 Reports success or failure.
2777 """
2777 """
2778
2778
2779 target = hg.peer(ui, {}, repopath)
2779 target = hg.peer(ui, {}, repopath)
2780 try:
2780 try:
2781 if keyinfo:
2781 if keyinfo:
2782 key, old, new = keyinfo
2782 key, old, new = keyinfo
2783 with target.commandexecutor() as e:
2783 with target.commandexecutor() as e:
2784 r = e.callcommand(
2784 r = e.callcommand(
2785 b'pushkey',
2785 b'pushkey',
2786 {
2786 {
2787 b'namespace': namespace,
2787 b'namespace': namespace,
2788 b'key': key,
2788 b'key': key,
2789 b'old': old,
2789 b'old': old,
2790 b'new': new,
2790 b'new': new,
2791 },
2791 },
2792 ).result()
2792 ).result()
2793
2793
2794 ui.status(pycompat.bytestr(r) + b'\n')
2794 ui.status(pycompat.bytestr(r) + b'\n')
2795 return not r
2795 return not r
2796 else:
2796 else:
2797 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2797 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2798 ui.write(
2798 ui.write(
2799 b"%s\t%s\n"
2799 b"%s\t%s\n"
2800 % (stringutil.escapestr(k), stringutil.escapestr(v))
2800 % (stringutil.escapestr(k), stringutil.escapestr(v))
2801 )
2801 )
2802 finally:
2802 finally:
2803 target.close()
2803 target.close()
2804
2804
2805
2805
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent-vector comparison data between two revisions

    Prints the pvec of each revision, their relation (``=``, ``>``, ``<``
    or ``|`` for unrelated), their depths, and delta/hamming/distance
    metrics between the two vectors.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: the comparisons above are expected to be
        # exhaustive, but without this branch `rel` would be unbound and
        # the write below would raise NameError instead of useful output.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2832
2832
2833
2833
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    # Rebuilding mutates the dirstate, so hold the working-directory lock.
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Tracked in the target manifest but absent from the dirstate.
            manifestonly = manifestfiles - dirstatefiles
            # In the dirstate but not in the target manifest.
            dsonly = dirstatefiles - manifestfiles
            # Keep deliberate adds (state b'a') untouched; everything else
            # in dsonly is an inconsistency to repair.
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2881
2881
2882
2882
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all of the work is delegated to repair.rebuildfncache.
    repair.rebuildfncache(ui, repo)
2887
2887
2888
2888
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() reports the copy source for this filenode, or a falsy
        # value when the file was not renamed.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            source, srcnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, source, hex(srcnode))
            )
2908
2908
2909
2909
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # Emit one requirement per line, in deterministic (sorted) order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2915
2915
2916
2916
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: print one raw index row per revision, then exit.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0  # running total of raw (uncompressed) sizes
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # No delta parent: the revision is its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the current set of heads: a new rev displaces its
            # parents from the set.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Statistics mode: decode the revlog version/flags header word.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision, as [min, max, total] accumulators
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot (no delta parent): starts a new chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta revision: extends the chain of its delta parent.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            # Span = on-disk distance from the chain base through this rev.
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # Intermediate snapshot (delta against another snapshot).
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta; classify against prev/p1/p2/other.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte identifies the compression engine of the chunk.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Turn totals into averages in the third slot of each accumulator.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
        for depth in snapsizedepth:
            snaptotal[depth] = snapsizedepth[depth][2]
            snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string templates; width is filled in by the helpers below.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Integer column wide enough for `max`.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # Integer column plus a percentage, with optional extra padding.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage of total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    # Revision-count breakdown.
    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    # Size breakdown.
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a chunk-type label; printable ASCII bytes shown literally.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    # Chunk-type (compression engine) breakdown.
    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    # Delta-chain summary.
    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        # min/max/avg sizes are only meaningful for non-v0 revlogs.
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        # Delta-parent distribution.
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            # p1/p2 delta parents only occur with generaldelta.
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3271
3271
3272
3272
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # Only the two historical index layouts are supported.
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full 40-char hashes; otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Probe the first node to size the nodeid columns consistently.
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision, mirroring the header layout above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the index entry is damaged.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3386
3386
3387
3387
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered parsing pipeline: each stage transforms the tree produced by
    # the previous one. The stage names double as user-facing -p arguments.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage entirely.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # showalways: stages always printed; showchanged: printed only when the
    # tree actually changed from the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        # Keep every intermediate tree so --verify-optimized can compare
        # 'analyzed' against 'optimized' below.
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both trees and diff the resulting revision sequences.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the mismatch as a unified-diff-style listing of revisions.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal (non-verify) path: evaluate the final tree and print results.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3519
3519
3520
3520
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the stdio-bound SSH transport is implemented here.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile both name an I/O log sink; accepting both
    # at once would be ambiguous.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve until the client disconnects; all wire I/O is optionally copied
    # to logfh by the server.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3569
3569
3570
3570
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use this if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # The second parent defaults to the null revision when not given.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parent pointers are touched; see the warning above.
    with repo.wlock():
        repo.setparents(node1, node2)
3598
3598
3599
3599
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the positional FILE
    # argument is actually the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fixed copy-paste from debugdata: report this command's own name
            # in the usage error.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap file storage objects down to the underlying revlog, which
    # carries the sidedata() accessor.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for stable, comparable output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3626
3626
3627
3627
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    # Without an explicit SOURCE, fall back to the repo's 'default' path.
    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only https and ssh URLs make sense for a TLS probe.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    # Imported here because the win32 module is only usable on Windows
    # (guarded by the iswindows check above).
    from . import win32

    # Verification is deliberately disabled (CERT_NONE): the point is to
    # fetch whatever certificate the server presents so its chain can be
    # checked/built below.
    # NOTE(review): ssl.wrap_socket is deprecated in modern Python; consider
    # ssl.SSLContext().wrap_socket() when the supported baseline allows.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # Binary (DER) form of the peer certificate.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass: check only (build=False); second pass below attempts
        # to actually build/install the missing chain elements.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3699
3699
3700
3700
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect strip-backup bundles, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize bundle/force so the incoming machinery below behaves.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from chlist, honoring the usual log
        # options (--newest-first, --no-merges).
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent we don't have; report and move on.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the noisy incoming machinery while probing the bundle;
        # restore verbosity whatever happens.
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    # First bundle containing the requested node wins.
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the bundle's mtime (and path with
                # --verbose), then a summary of its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    # One-line summary per changeset.
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3842
3842
3843
3843
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state recorded in a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # One stanza per subrepo path, emitted in deterministic (sorted) order.
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3855
3855
3856
3856
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the REPL namespace with the objects a debugging session needs.
    namespace = {
        'ui': ui,
        'repo': repo,
    }

    code.interact(local=namespace)
3872
3872
3873
3873
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless the closest
    successors set is requested.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
3928
3928
3929
3929
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is None:
            # No cached entry for this revision.
            display = b'missing'
        elif fnode:
            display = hex(fnode)
            # A cached fnode the .hgtags filelog does not know about points
            # at data we can no longer resolve.
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3948
3948
3949
3949
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions into template properties.
    props = {}
    for definition in opts['define']:
        try:
            key, value = [part.strip() for part in definition.split(b'=', 1)]
            # An empty key or the reserved name 'ui' is rejected.
            if not key or key == b'ui':
                raise ValueError
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )
        props[key] = value

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        expanded = templater.expandaliases(tree, aliases)
        if expanded != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(expanded), b'\n'
            )

    def showsymbols(t):
        # With --verbose, report which keywords/functions the template uses.
        kwds, funcs = t.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
4013
4013
4014
4014
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() can return None; substitute a marker so the output line
    # is always present and printable.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4029
4029
4030
4030
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4043
4043
4044
4044
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while the caches
    # are regenerated.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
4050
4050
4051
4051
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlogs but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # The declared default is None but the CLI always supplies a list;
    # guard so a direct API call with the default doesn't hit set(None).
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize or []), backup=backup, **opts
    )
4101
4101
4102
4102
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return

    # Choose the display transform: normalize separators to '/' if requested.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = lambda fn: util.normpath(fn)
    else:
        display = lambda fn: fn

    # Column widths are sized to the longest absolute and relative paths.
    abswidth = max(len(path) for path in matched)
    relwidth = max(len(repo.pathto(path)) for path in matched)
    fmt = b'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for path in matched:
        line = fmt % (
            path,
            display(repo.pathto(path)),
            b'exact' if matcher.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4129
4129
4130
4130
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent node with its phase, space separated,
            # with a trailing space to join onto the reason below.
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4148
4148
4149
4149
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """issue the 'debugwireargs' wire protocol command against a peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only command arguments remain.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only the options that were actually set.
        args = {k: v for k, v in pycompat.iteritems(opts) if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4180
4180
4181
4181
4182 def _parsewirelangblocks(fh):
4182 def _parsewirelangblocks(fh):
4183 activeaction = None
4183 activeaction = None
4184 blocklines = []
4184 blocklines = []
4185 lastindent = 0
4185 lastindent = 0
4186
4186
4187 for line in fh:
4187 for line in fh:
4188 line = line.rstrip()
4188 line = line.rstrip()
4189 if not line:
4189 if not line:
4190 continue
4190 continue
4191
4191
4192 if line.startswith(b'#'):
4192 if line.startswith(b'#'):
4193 continue
4193 continue
4194
4194
4195 if not line.startswith(b' '):
4195 if not line.startswith(b' '):
4196 # New block. Flush previous one.
4196 # New block. Flush previous one.
4197 if activeaction:
4197 if activeaction:
4198 yield activeaction, blocklines
4198 yield activeaction, blocklines
4199
4199
4200 activeaction = line
4200 activeaction = line
4201 blocklines = []
4201 blocklines = []
4202 lastindent = 0
4202 lastindent = 0
4203 continue
4203 continue
4204
4204
4205 # Else we start with an indent.
4205 # Else we start with an indent.
4206
4206
4207 if not activeaction:
4207 if not activeaction:
4208 raise error.Abort(_(b'indented line outside of block'))
4208 raise error.Abort(_(b'indented line outside of block'))
4209
4209
4210 indent = len(line) - len(line.lstrip())
4210 indent = len(line) - len(line.lstrip())
4211
4211
4212 # If this line is indented more than the last line, concatenate it.
4212 # If this line is indented more than the last line, concatenate it.
4213 if indent > lastindent and blocklines:
4213 if indent > lastindent and blocklines:
4214 blocklines[-1] += line.lstrip()
4214 blocklines[-1] += line.lstrip()
4215 else:
4215 else:
4216 blocklines.append(line)
4216 blocklines.append(line)
4217 lastindent = indent
4217 lastindent = indent
4218
4218
4219 # Flush last block.
4219 # Flush last block.
4220 if activeaction:
4220 if activeaction:
4221 yield activeaction, blocklines
4221 yield activeaction, blocklines
4222
4222
4223
4223
4224 @command(
4224 @command(
4225 b'debugwireproto',
4225 b'debugwireproto',
4226 [
4226 [
4227 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4227 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4228 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4228 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4229 (
4229 (
4230 b'',
4230 b'',
4231 b'noreadstderr',
4231 b'noreadstderr',
4232 False,
4232 False,
4233 _(b'do not read from stderr of the remote'),
4233 _(b'do not read from stderr of the remote'),
4234 ),
4234 ),
4235 (
4235 (
4236 b'',
4236 b'',
4237 b'nologhandshake',
4237 b'nologhandshake',
4238 False,
4238 False,
4239 _(b'do not log I/O related to the peer handshake'),
4239 _(b'do not log I/O related to the peer handshake'),
4240 ),
4240 ),
4241 ]
4241 ]
4242 + cmdutil.remoteopts,
4242 + cmdutil.remoteopts,
4243 _(b'[PATH]'),
4243 _(b'[PATH]'),
4244 optionalrepo=True,
4244 optionalrepo=True,
4245 )
4245 )
4246 def debugwireproto(ui, repo, path=None, **opts):
4246 def debugwireproto(ui, repo, path=None, **opts):
4247 """send wire protocol commands to a server
4247 """send wire protocol commands to a server
4248
4248
4249 This command can be used to issue wire protocol commands to remote
4249 This command can be used to issue wire protocol commands to remote
4250 peers and to debug the raw data being exchanged.
4250 peers and to debug the raw data being exchanged.
4251
4251
4252 ``--localssh`` will start an SSH server against the current repository
4252 ``--localssh`` will start an SSH server against the current repository
4253 and connect to that. By default, the connection will perform a handshake
4253 and connect to that. By default, the connection will perform a handshake
4254 and establish an appropriate peer instance.
4254 and establish an appropriate peer instance.
4255
4255
4256 ``--peer`` can be used to bypass the handshake protocol and construct a
4256 ``--peer`` can be used to bypass the handshake protocol and construct a
4257 peer instance using the specified class type. Valid values are ``raw``,
4257 peer instance using the specified class type. Valid values are ``raw``,
4258 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4258 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4259 raw data payloads and don't support higher-level command actions.
4259 raw data payloads and don't support higher-level command actions.
4260
4260
4261 ``--noreadstderr`` can be used to disable automatic reading from stderr
4261 ``--noreadstderr`` can be used to disable automatic reading from stderr
4262 of the peer (for SSH connections only). Disabling automatic reading of
4262 of the peer (for SSH connections only). Disabling automatic reading of
4263 stderr is useful for making output more deterministic.
4263 stderr is useful for making output more deterministic.
4264
4264
4265 Commands are issued via a mini language which is specified via stdin.
4265 Commands are issued via a mini language which is specified via stdin.
4266 The language consists of individual actions to perform. An action is
4266 The language consists of individual actions to perform. An action is
4267 defined by a block. A block is defined as a line with no leading
4267 defined by a block. A block is defined as a line with no leading
4268 space followed by 0 or more lines with leading space. Blocks are
4268 space followed by 0 or more lines with leading space. Blocks are
4269 effectively a high-level command with additional metadata.
4269 effectively a high-level command with additional metadata.
4270
4270
4271 Lines beginning with ``#`` are ignored.
4271 Lines beginning with ``#`` are ignored.
4272
4272
4273 The following sections denote available actions.
4273 The following sections denote available actions.
4274
4274
4275 raw
4275 raw
4276 ---
4276 ---
4277
4277
4278 Send raw data to the server.
4278 Send raw data to the server.
4279
4279
4280 The block payload contains the raw data to send as one atomic send
4280 The block payload contains the raw data to send as one atomic send
4281 operation. The data may not actually be delivered in a single system
4281 operation. The data may not actually be delivered in a single system
4282 call: it depends on the abilities of the transport being used.
4282 call: it depends on the abilities of the transport being used.
4283
4283
4284 Each line in the block is de-indented and concatenated. Then, that
4284 Each line in the block is de-indented and concatenated. Then, that
4285 value is evaluated as a Python b'' literal. This allows the use of
4285 value is evaluated as a Python b'' literal. This allows the use of
4286 backslash escaping, etc.
4286 backslash escaping, etc.
4287
4287
4288 raw+
4288 raw+
4289 ----
4289 ----
4290
4290
4291 Behaves like ``raw`` except flushes output afterwards.
4291 Behaves like ``raw`` except flushes output afterwards.
4292
4292
4293 command <X>
4293 command <X>
4294 -----------
4294 -----------
4295
4295
4296 Send a request to run a named command, whose name follows the ``command``
4296 Send a request to run a named command, whose name follows the ``command``
4297 string.
4297 string.
4298
4298
4299 Arguments to the command are defined as lines in this block. The format of
4299 Arguments to the command are defined as lines in this block. The format of
4300 each line is ``<key> <value>``. e.g.::
4300 each line is ``<key> <value>``. e.g.::
4301
4301
4302 command listkeys
4302 command listkeys
4303 namespace bookmarks
4303 namespace bookmarks
4304
4304
4305 If the value begins with ``eval:``, it will be interpreted as a Python
4305 If the value begins with ``eval:``, it will be interpreted as a Python
4306 literal expression. Otherwise values are interpreted as Python b'' literals.
4306 literal expression. Otherwise values are interpreted as Python b'' literals.
4307 This allows sending complex types and encoding special byte sequences via
4307 This allows sending complex types and encoding special byte sequences via
4308 backslash escaping.
4308 backslash escaping.
4309
4309
4310 The following arguments have special meaning:
4310 The following arguments have special meaning:
4311
4311
4312 ``PUSHFILE``
4312 ``PUSHFILE``
4313 When defined, the *push* mechanism of the peer will be used instead
4313 When defined, the *push* mechanism of the peer will be used instead
4314 of the static request-response mechanism and the content of the
4314 of the static request-response mechanism and the content of the
4315 file specified in the value of this argument will be sent as the
4315 file specified in the value of this argument will be sent as the
4316 command payload.
4316 command payload.
4317
4317
4318 This can be used to submit a local bundle file to the remote.
4318 This can be used to submit a local bundle file to the remote.
4319
4319
4320 batchbegin
4320 batchbegin
4321 ----------
4321 ----------
4322
4322
4323 Instruct the peer to begin a batched send.
4323 Instruct the peer to begin a batched send.
4324
4324
4325 All ``command`` blocks are queued for execution until the next
4325 All ``command`` blocks are queued for execution until the next
4326 ``batchsubmit`` block.
4326 ``batchsubmit`` block.
4327
4327
4328 batchsubmit
4328 batchsubmit
4329 -----------
4329 -----------
4330
4330
4331 Submit previously queued ``command`` blocks as a batch request.
4331 Submit previously queued ``command`` blocks as a batch request.
4332
4332
4333 This action MUST be paired with a ``batchbegin`` action.
4333 This action MUST be paired with a ``batchbegin`` action.
4334
4334
4335 httprequest <method> <path>
4335 httprequest <method> <path>
4336 ---------------------------
4336 ---------------------------
4337
4337
4338 (HTTP peer only)
4338 (HTTP peer only)
4339
4339
4340 Send an HTTP request to the peer.
4340 Send an HTTP request to the peer.
4341
4341
4342 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4342 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4343
4343
4344 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4344 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4345 headers to add to the request. e.g. ``Accept: foo``.
4345 headers to add to the request. e.g. ``Accept: foo``.
4346
4346
4347 The following arguments are special:
4347 The following arguments are special:
4348
4348
4349 ``BODYFILE``
4349 ``BODYFILE``
4350 The content of the file defined as the value to this argument will be
4350 The content of the file defined as the value to this argument will be
4351 transferred verbatim as the HTTP request body.
4351 transferred verbatim as the HTTP request body.
4352
4352
4353 ``frame <type> <flags> <payload>``
4353 ``frame <type> <flags> <payload>``
4354 Send a unified protocol frame as part of the request body.
4354 Send a unified protocol frame as part of the request body.
4355
4355
4356 All frames will be collected and sent as the body to the HTTP
4356 All frames will be collected and sent as the body to the HTTP
4357 request.
4357 request.
4358
4358
4359 close
4359 close
4360 -----
4360 -----
4361
4361
4362 Close the connection to the server.
4362 Close the connection to the server.
4363
4363
4364 flush
4364 flush
4365 -----
4365 -----
4366
4366
4367 Flush data written to the server.
4367 Flush data written to the server.
4368
4368
4369 readavailable
4369 readavailable
4370 -------------
4370 -------------
4371
4371
4372 Close the write end of the connection and read all available data from
4372 Close the write end of the connection and read all available data from
4373 the server.
4373 the server.
4374
4374
4375 If the connection to the server encompasses multiple pipes, we poll both
4375 If the connection to the server encompasses multiple pipes, we poll both
4376 pipes and read available data.
4376 pipes and read available data.
4377
4377
4378 readline
4378 readline
4379 --------
4379 --------
4380
4380
4381 Read a line of output from the server. If there are multiple output
4381 Read a line of output from the server. If there are multiple output
4382 pipes, reads only the main pipe.
4382 pipes, reads only the main pipe.
4383
4383
4384 ereadline
4384 ereadline
4385 ---------
4385 ---------
4386
4386
4387 Like ``readline``, but read from the stderr pipe, if available.
4387 Like ``readline``, but read from the stderr pipe, if available.
4388
4388
4389 read <X>
4389 read <X>
4390 --------
4390 --------
4391
4391
4392 ``read()`` N bytes from the server's main output pipe.
4392 ``read()`` N bytes from the server's main output pipe.
4393
4393
4394 eread <X>
4394 eread <X>
4395 ---------
4395 ---------
4396
4396
4397 ``read()`` N bytes from the server's stderr pipe, if available.
4397 ``read()`` N bytes from the server's stderr pipe, if available.
4398
4398
4399 Specifying Unified Frame-Based Protocol Frames
4399 Specifying Unified Frame-Based Protocol Frames
4400 ----------------------------------------------
4400 ----------------------------------------------
4401
4401
4402 It is possible to emit a *Unified Frame-Based Protocol* by using special
4402 It is possible to emit a *Unified Frame-Based Protocol* by using special
4403 syntax.
4403 syntax.
4404
4404
4405 A frame is composed as a type, flags, and payload. These can be parsed
4405 A frame is composed as a type, flags, and payload. These can be parsed
4406 from a string of the form:
4406 from a string of the form:
4407
4407
4408 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4408 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4409
4409
4410 ``request-id`` and ``stream-id`` are integers defining the request and
4410 ``request-id`` and ``stream-id`` are integers defining the request and
4411 stream identifiers.
4411 stream identifiers.
4412
4412
4413 ``type`` can be an integer value for the frame type or the string name
4413 ``type`` can be an integer value for the frame type or the string name
4414 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4414 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4415 ``command-name``.
4415 ``command-name``.
4416
4416
4417 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4417 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4418 components. Each component (and there can be just one) can be an integer
4418 components. Each component (and there can be just one) can be an integer
4419 or a flag name for stream flags or frame flags, respectively. Values are
4419 or a flag name for stream flags or frame flags, respectively. Values are
4420 resolved to integers and then bitwise OR'd together.
4420 resolved to integers and then bitwise OR'd together.
4421
4421
4422 ``payload`` represents the raw frame payload. If it begins with
4422 ``payload`` represents the raw frame payload. If it begins with
4423 ``cbor:``, the following string is evaluated as Python code and the
4423 ``cbor:``, the following string is evaluated as Python code and the
4424 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4424 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4425 as a Python byte string literal.
4425 as a Python byte string literal.
4426 """
4426 """
4427 opts = pycompat.byteskwargs(opts)
4427 opts = pycompat.byteskwargs(opts)
4428
4428
4429 if opts[b'localssh'] and not repo:
4429 if opts[b'localssh'] and not repo:
4430 raise error.Abort(_(b'--localssh requires a repository'))
4430 raise error.Abort(_(b'--localssh requires a repository'))
4431
4431
4432 if opts[b'peer'] and opts[b'peer'] not in (
4432 if opts[b'peer'] and opts[b'peer'] not in (
4433 b'raw',
4433 b'raw',
4434 b'http2',
4434 b'http2',
4435 b'ssh1',
4435 b'ssh1',
4436 b'ssh2',
4436 b'ssh2',
4437 ):
4437 ):
4438 raise error.Abort(
4438 raise error.Abort(
4439 _(b'invalid value for --peer'),
4439 _(b'invalid value for --peer'),
4440 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4440 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4441 )
4441 )
4442
4442
4443 if path and opts[b'localssh']:
4443 if path and opts[b'localssh']:
4444 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4444 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4445
4445
4446 if ui.interactive():
4446 if ui.interactive():
4447 ui.write(_(b'(waiting for commands on stdin)\n'))
4447 ui.write(_(b'(waiting for commands on stdin)\n'))
4448
4448
4449 blocks = list(_parsewirelangblocks(ui.fin))
4449 blocks = list(_parsewirelangblocks(ui.fin))
4450
4450
4451 proc = None
4451 proc = None
4452 stdin = None
4452 stdin = None
4453 stdout = None
4453 stdout = None
4454 stderr = None
4454 stderr = None
4455 opener = None
4455 opener = None
4456
4456
4457 if opts[b'localssh']:
4457 if opts[b'localssh']:
4458 # We start the SSH server in its own process so there is process
4458 # We start the SSH server in its own process so there is process
4459 # separation. This prevents a whole class of potential bugs around
4459 # separation. This prevents a whole class of potential bugs around
4460 # shared state from interfering with server operation.
4460 # shared state from interfering with server operation.
4461 args = procutil.hgcmd() + [
4461 args = procutil.hgcmd() + [
4462 b'-R',
4462 b'-R',
4463 repo.root,
4463 repo.root,
4464 b'debugserve',
4464 b'debugserve',
4465 b'--sshstdio',
4465 b'--sshstdio',
4466 ]
4466 ]
4467 proc = subprocess.Popen(
4467 proc = subprocess.Popen(
4468 pycompat.rapply(procutil.tonativestr, args),
4468 pycompat.rapply(procutil.tonativestr, args),
4469 stdin=subprocess.PIPE,
4469 stdin=subprocess.PIPE,
4470 stdout=subprocess.PIPE,
4470 stdout=subprocess.PIPE,
4471 stderr=subprocess.PIPE,
4471 stderr=subprocess.PIPE,
4472 bufsize=0,
4472 bufsize=0,
4473 )
4473 )
4474
4474
4475 stdin = proc.stdin
4475 stdin = proc.stdin
4476 stdout = proc.stdout
4476 stdout = proc.stdout
4477 stderr = proc.stderr
4477 stderr = proc.stderr
4478
4478
4479 # We turn the pipes into observers so we can log I/O.
4479 # We turn the pipes into observers so we can log I/O.
4480 if ui.verbose or opts[b'peer'] == b'raw':
4480 if ui.verbose or opts[b'peer'] == b'raw':
4481 stdin = util.makeloggingfileobject(
4481 stdin = util.makeloggingfileobject(
4482 ui, proc.stdin, b'i', logdata=True
4482 ui, proc.stdin, b'i', logdata=True
4483 )
4483 )
4484 stdout = util.makeloggingfileobject(
4484 stdout = util.makeloggingfileobject(
4485 ui, proc.stdout, b'o', logdata=True
4485 ui, proc.stdout, b'o', logdata=True
4486 )
4486 )
4487 stderr = util.makeloggingfileobject(
4487 stderr = util.makeloggingfileobject(
4488 ui, proc.stderr, b'e', logdata=True
4488 ui, proc.stderr, b'e', logdata=True
4489 )
4489 )
4490
4490
4491 # --localssh also implies the peer connection settings.
4491 # --localssh also implies the peer connection settings.
4492
4492
4493 url = b'ssh://localserver'
4493 url = b'ssh://localserver'
4494 autoreadstderr = not opts[b'noreadstderr']
4494 autoreadstderr = not opts[b'noreadstderr']
4495
4495
4496 if opts[b'peer'] == b'ssh1':
4496 if opts[b'peer'] == b'ssh1':
4497 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4497 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4498 peer = sshpeer.sshv1peer(
4498 peer = sshpeer.sshv1peer(
4499 ui,
4499 ui,
4500 url,
4500 url,
4501 proc,
4501 proc,
4502 stdin,
4502 stdin,
4503 stdout,
4503 stdout,
4504 stderr,
4504 stderr,
4505 None,
4505 None,
4506 autoreadstderr=autoreadstderr,
4506 autoreadstderr=autoreadstderr,
4507 )
4507 )
4508 elif opts[b'peer'] == b'ssh2':
4508 elif opts[b'peer'] == b'ssh2':
4509 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4509 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4510 peer = sshpeer.sshv2peer(
4510 peer = sshpeer.sshv2peer(
4511 ui,
4511 ui,
4512 url,
4512 url,
4513 proc,
4513 proc,
4514 stdin,
4514 stdin,
4515 stdout,
4515 stdout,
4516 stderr,
4516 stderr,
4517 None,
4517 None,
4518 autoreadstderr=autoreadstderr,
4518 autoreadstderr=autoreadstderr,
4519 )
4519 )
4520 elif opts[b'peer'] == b'raw':
4520 elif opts[b'peer'] == b'raw':
4521 ui.write(_(b'using raw connection to peer\n'))
4521 ui.write(_(b'using raw connection to peer\n'))
4522 peer = None
4522 peer = None
4523 else:
4523 else:
4524 ui.write(_(b'creating ssh peer from handshake results\n'))
4524 ui.write(_(b'creating ssh peer from handshake results\n'))
4525 peer = sshpeer.makepeer(
4525 peer = sshpeer.makepeer(
4526 ui,
4526 ui,
4527 url,
4527 url,
4528 proc,
4528 proc,
4529 stdin,
4529 stdin,
4530 stdout,
4530 stdout,
4531 stderr,
4531 stderr,
4532 autoreadstderr=autoreadstderr,
4532 autoreadstderr=autoreadstderr,
4533 )
4533 )
4534
4534
4535 elif path:
4535 elif path:
4536 # We bypass hg.peer() so we can proxy the sockets.
4536 # We bypass hg.peer() so we can proxy the sockets.
4537 # TODO consider not doing this because we skip
4537 # TODO consider not doing this because we skip
4538 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4538 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4539 u = urlutil.url(path)
4539 u = urlutil.url(path)
4540 if u.scheme != b'http':
4540 if u.scheme != b'http':
4541 raise error.Abort(_(b'only http:// paths are currently supported'))
4541 raise error.Abort(_(b'only http:// paths are currently supported'))
4542
4542
4543 url, authinfo = u.authinfo()
4543 url, authinfo = u.authinfo()
4544 openerargs = {
4544 openerargs = {
4545 'useragent': b'Mercurial debugwireproto',
4545 'useragent': b'Mercurial debugwireproto',
4546 }
4546 }
4547
4547
4548 # Turn pipes/sockets into observers so we can log I/O.
4548 # Turn pipes/sockets into observers so we can log I/O.
4549 if ui.verbose:
4549 if ui.verbose:
4550 openerargs.update(
4550 openerargs.update(
4551 {
4551 {
4552 'loggingfh': ui,
4552 'loggingfh': ui,
4553 'loggingname': b's',
4553 'loggingname': b's',
4554 'loggingopts': {
4554 'loggingopts': {
4555 'logdata': True,
4555 'logdata': True,
4556 'logdataapis': False,
4556 'logdataapis': False,
4557 },
4557 },
4558 }
4558 }
4559 )
4559 )
4560
4560
4561 if ui.debugflag:
4561 if ui.debugflag:
4562 openerargs['loggingopts']['logdataapis'] = True
4562 openerargs['loggingopts']['logdataapis'] = True
4563
4563
4564 # Don't send default headers when in raw mode. This allows us to
4564 # Don't send default headers when in raw mode. This allows us to
4565 # bypass most of the behavior of our URL handling code so we can
4565 # bypass most of the behavior of our URL handling code so we can
4566 # have near complete control over what's sent on the wire.
4566 # have near complete control over what's sent on the wire.
4567 if opts[b'peer'] == b'raw':
4567 if opts[b'peer'] == b'raw':
4568 openerargs['sendaccept'] = False
4568 openerargs['sendaccept'] = False
4569
4569
4570 opener = urlmod.opener(ui, authinfo, **openerargs)
4570 opener = urlmod.opener(ui, authinfo, **openerargs)
4571
4571
4572 if opts[b'peer'] == b'http2':
4572 if opts[b'peer'] == b'http2':
4573 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4573 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4574 # We go through makepeer() because we need an API descriptor for
4574 # We go through makepeer() because we need an API descriptor for
4575 # the peer instance to be useful.
4575 # the peer instance to be useful.
4576 with ui.configoverride(
4576 with ui.configoverride(
4577 {(b'experimental', b'httppeer.advertise-v2'): True}
4577 {(b'experimental', b'httppeer.advertise-v2'): True}
4578 ):
4578 ):
4579 if opts[b'nologhandshake']:
4579 if opts[b'nologhandshake']:
4580 ui.pushbuffer()
4580 ui.pushbuffer()
4581
4581
4582 peer = httppeer.makepeer(ui, path, opener=opener)
4582 peer = httppeer.makepeer(ui, path, opener=opener)
4583
4583
4584 if opts[b'nologhandshake']:
4584 if opts[b'nologhandshake']:
4585 ui.popbuffer()
4585 ui.popbuffer()
4586
4586
4587 if not isinstance(peer, httppeer.httpv2peer):
4587 if not isinstance(peer, httppeer.httpv2peer):
4588 raise error.Abort(
4588 raise error.Abort(
4589 _(
4589 _(
4590 b'could not instantiate HTTP peer for '
4590 b'could not instantiate HTTP peer for '
4591 b'wire protocol version 2'
4591 b'wire protocol version 2'
4592 ),
4592 ),
4593 hint=_(
4593 hint=_(
4594 b'the server may not have the feature '
4594 b'the server may not have the feature '
4595 b'enabled or is not allowing this '
4595 b'enabled or is not allowing this '
4596 b'client version'
4596 b'client version'
4597 ),
4597 ),
4598 )
4598 )
4599
4599
4600 elif opts[b'peer'] == b'raw':
4600 elif opts[b'peer'] == b'raw':
4601 ui.write(_(b'using raw connection to peer\n'))
4601 ui.write(_(b'using raw connection to peer\n'))
4602 peer = None
4602 peer = None
4603 elif opts[b'peer']:
4603 elif opts[b'peer']:
4604 raise error.Abort(
4604 raise error.Abort(
4605 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4605 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4606 )
4606 )
4607 else:
4607 else:
4608 peer = httppeer.makepeer(ui, path, opener=opener)
4608 peer = httppeer.makepeer(ui, path, opener=opener)
4609
4609
4610 # We /could/ populate stdin/stdout with sock.makefile()...
4610 # We /could/ populate stdin/stdout with sock.makefile()...
4611 else:
4611 else:
4612 raise error.Abort(_(b'unsupported connection configuration'))
4612 raise error.Abort(_(b'unsupported connection configuration'))
4613
4613
4614 batchedcommands = None
4614 batchedcommands = None
4615
4615
4616 # Now perform actions based on the parsed wire language instructions.
4616 # Now perform actions based on the parsed wire language instructions.
4617 for action, lines in blocks:
4617 for action, lines in blocks:
4618 if action in (b'raw', b'raw+'):
4618 if action in (b'raw', b'raw+'):
4619 if not stdin:
4619 if not stdin:
4620 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4620 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4621
4621
4622 # Concatenate the data together.
4622 # Concatenate the data together.
4623 data = b''.join(l.lstrip() for l in lines)
4623 data = b''.join(l.lstrip() for l in lines)
4624 data = stringutil.unescapestr(data)
4624 data = stringutil.unescapestr(data)
4625 stdin.write(data)
4625 stdin.write(data)
4626
4626
4627 if action == b'raw+':
4627 if action == b'raw+':
4628 stdin.flush()
4628 stdin.flush()
4629 elif action == b'flush':
4629 elif action == b'flush':
4630 if not stdin:
4630 if not stdin:
4631 raise error.Abort(_(b'cannot call flush on this peer'))
4631 raise error.Abort(_(b'cannot call flush on this peer'))
4632 stdin.flush()
4632 stdin.flush()
4633 elif action.startswith(b'command'):
4633 elif action.startswith(b'command'):
4634 if not peer:
4634 if not peer:
4635 raise error.Abort(
4635 raise error.Abort(
4636 _(
4636 _(
4637 b'cannot send commands unless peer instance '
4637 b'cannot send commands unless peer instance '
4638 b'is available'
4638 b'is available'
4639 )
4639 )
4640 )
4640 )
4641
4641
4642 command = action.split(b' ', 1)[1]
4642 command = action.split(b' ', 1)[1]
4643
4643
4644 args = {}
4644 args = {}
4645 for line in lines:
4645 for line in lines:
4646 # We need to allow empty values.
4646 # We need to allow empty values.
4647 fields = line.lstrip().split(b' ', 1)
4647 fields = line.lstrip().split(b' ', 1)
4648 if len(fields) == 1:
4648 if len(fields) == 1:
4649 key = fields[0]
4649 key = fields[0]
4650 value = b''
4650 value = b''
4651 else:
4651 else:
4652 key, value = fields
4652 key, value = fields
4653
4653
4654 if value.startswith(b'eval:'):
4654 if value.startswith(b'eval:'):
4655 value = stringutil.evalpythonliteral(value[5:])
4655 value = stringutil.evalpythonliteral(value[5:])
4656 else:
4656 else:
4657 value = stringutil.unescapestr(value)
4657 value = stringutil.unescapestr(value)
4658
4658
4659 args[key] = value
4659 args[key] = value
4660
4660
4661 if batchedcommands is not None:
4661 if batchedcommands is not None:
4662 batchedcommands.append((command, args))
4662 batchedcommands.append((command, args))
4663 continue
4663 continue
4664
4664
4665 ui.status(_(b'sending %s command\n') % command)
4665 ui.status(_(b'sending %s command\n') % command)
4666
4666
4667 if b'PUSHFILE' in args:
4667 if b'PUSHFILE' in args:
4668 with open(args[b'PUSHFILE'], 'rb') as fh:
4668 with open(args[b'PUSHFILE'], 'rb') as fh:
4669 del args[b'PUSHFILE']
4669 del args[b'PUSHFILE']
4670 res, output = peer._callpush(
4670 res, output = peer._callpush(
4671 command, fh, **pycompat.strkwargs(args)
4671 command, fh, **pycompat.strkwargs(args)
4672 )
4672 )
4673 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4673 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4674 ui.status(
4674 ui.status(
4675 _(b'remote output: %s\n') % stringutil.escapestr(output)
4675 _(b'remote output: %s\n') % stringutil.escapestr(output)
4676 )
4676 )
4677 else:
4677 else:
4678 with peer.commandexecutor() as e:
4678 with peer.commandexecutor() as e:
4679 res = e.callcommand(command, args).result()
4679 res = e.callcommand(command, args).result()
4680
4680
4681 if isinstance(res, wireprotov2peer.commandresponse):
4681 if isinstance(res, wireprotov2peer.commandresponse):
4682 val = res.objects()
4682 val = res.objects()
4683 ui.status(
4683 ui.status(
4684 _(b'response: %s\n')
4684 _(b'response: %s\n')
4685 % stringutil.pprint(val, bprefix=True, indent=2)
4685 % stringutil.pprint(val, bprefix=True, indent=2)
4686 )
4686 )
4687 else:
4687 else:
4688 ui.status(
4688 ui.status(
4689 _(b'response: %s\n')
4689 _(b'response: %s\n')
4690 % stringutil.pprint(res, bprefix=True, indent=2)
4690 % stringutil.pprint(res, bprefix=True, indent=2)
4691 )
4691 )
4692
4692
4693 elif action == b'batchbegin':
4693 elif action == b'batchbegin':
4694 if batchedcommands is not None:
4694 if batchedcommands is not None:
4695 raise error.Abort(_(b'nested batchbegin not allowed'))
4695 raise error.Abort(_(b'nested batchbegin not allowed'))
4696
4696
4697 batchedcommands = []
4697 batchedcommands = []
4698 elif action == b'batchsubmit':
4698 elif action == b'batchsubmit':
4699 # There is a batching API we could go through. But it would be
4699 # There is a batching API we could go through. But it would be
4700 # difficult to normalize requests into function calls. It is easier
4700 # difficult to normalize requests into function calls. It is easier
4701 # to bypass this layer and normalize to commands + args.
4701 # to bypass this layer and normalize to commands + args.
4702 ui.status(
4702 ui.status(
4703 _(b'sending batch with %d sub-commands\n')
4703 _(b'sending batch with %d sub-commands\n')
4704 % len(batchedcommands)
4704 % len(batchedcommands)
4705 )
4705 )
4706 assert peer is not None
4706 assert peer is not None
4707 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4707 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4708 ui.status(
4708 ui.status(
4709 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4709 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4710 )
4710 )
4711
4711
4712 batchedcommands = None
4712 batchedcommands = None
4713
4713
4714 elif action.startswith(b'httprequest '):
4714 elif action.startswith(b'httprequest '):
4715 if not opener:
4715 if not opener:
4716 raise error.Abort(
4716 raise error.Abort(
4717 _(b'cannot use httprequest without an HTTP peer')
4717 _(b'cannot use httprequest without an HTTP peer')
4718 )
4718 )
4719
4719
4720 request = action.split(b' ', 2)
4720 request = action.split(b' ', 2)
4721 if len(request) != 3:
4721 if len(request) != 3:
4722 raise error.Abort(
4722 raise error.Abort(
4723 _(
4723 _(
4724 b'invalid httprequest: expected format is '
4724 b'invalid httprequest: expected format is '
4725 b'"httprequest <method> <path>'
4725 b'"httprequest <method> <path>'
4726 )
4726 )
4727 )
4727 )
4728
4728
4729 method, httppath = request[1:]
4729 method, httppath = request[1:]
4730 headers = {}
4730 headers = {}
4731 body = None
4731 body = None
4732 frames = []
4732 frames = []
4733 for line in lines:
4733 for line in lines:
4734 line = line.lstrip()
4734 line = line.lstrip()
4735 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4735 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4736 if m:
4736 if m:
4737 # Headers need to use native strings.
4737 # Headers need to use native strings.
4738 key = pycompat.strurl(m.group(1))
4738 key = pycompat.strurl(m.group(1))
4739 value = pycompat.strurl(m.group(2))
4739 value = pycompat.strurl(m.group(2))
4740 headers[key] = value
4740 headers[key] = value
4741 continue
4741 continue
4742
4742
4743 if line.startswith(b'BODYFILE '):
4743 if line.startswith(b'BODYFILE '):
4744 with open(line.split(b' ', 1), b'rb') as fh:
4744 with open(line.split(b' ', 1), b'rb') as fh:
4745 body = fh.read()
4745 body = fh.read()
4746 elif line.startswith(b'frame '):
4746 elif line.startswith(b'frame '):
4747 frame = wireprotoframing.makeframefromhumanstring(
4747 frame = wireprotoframing.makeframefromhumanstring(
4748 line[len(b'frame ') :]
4748 line[len(b'frame ') :]
4749 )
4749 )
4750
4750
4751 frames.append(frame)
4751 frames.append(frame)
4752 else:
4752 else:
4753 raise error.Abort(
4753 raise error.Abort(
4754 _(b'unknown argument to httprequest: %s') % line
4754 _(b'unknown argument to httprequest: %s') % line
4755 )
4755 )
4756
4756
4757 url = path + httppath
4757 url = path + httppath
4758
4758
4759 if frames:
4759 if frames:
4760 body = b''.join(bytes(f) for f in frames)
4760 body = b''.join(bytes(f) for f in frames)
4761
4761
4762 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4762 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4763
4763
4764 # urllib.Request insists on using has_data() as a proxy for
4764 # urllib.Request insists on using has_data() as a proxy for
4765 # determining the request method. Override that to use our
4765 # determining the request method. Override that to use our
4766 # explicitly requested method.
4766 # explicitly requested method.
4767 req.get_method = lambda: pycompat.sysstr(method)
4767 req.get_method = lambda: pycompat.sysstr(method)
4768
4768
4769 try:
4769 try:
4770 res = opener.open(req)
4770 res = opener.open(req)
4771 body = res.read()
4771 body = res.read()
4772 except util.urlerr.urlerror as e:
4772 except util.urlerr.urlerror as e:
4773 # read() method must be called, but only exists in Python 2
4773 # read() method must be called, but only exists in Python 2
4774 getattr(e, 'read', lambda: None)()
4774 getattr(e, 'read', lambda: None)()
4775 continue
4775 continue
4776
4776
4777 ct = res.headers.get('Content-Type')
4777 ct = res.headers.get('Content-Type')
4778 if ct == 'application/mercurial-cbor':
4778 if ct == 'application/mercurial-cbor':
4779 ui.write(
4779 ui.write(
4780 _(b'cbor> %s\n')
4780 _(b'cbor> %s\n')
4781 % stringutil.pprint(
4781 % stringutil.pprint(
4782 cborutil.decodeall(body), bprefix=True, indent=2
4782 cborutil.decodeall(body), bprefix=True, indent=2
4783 )
4783 )
4784 )
4784 )
4785
4785
4786 elif action == b'close':
4786 elif action == b'close':
4787 assert peer is not None
4787 assert peer is not None
4788 peer.close()
4788 peer.close()
4789 elif action == b'readavailable':
4789 elif action == b'readavailable':
4790 if not stdout or not stderr:
4790 if not stdout or not stderr:
4791 raise error.Abort(
4791 raise error.Abort(
4792 _(b'readavailable not available on this peer')
4792 _(b'readavailable not available on this peer')
4793 )
4793 )
4794
4794
4795 stdin.close()
4795 stdin.close()
4796 stdout.read()
4796 stdout.read()
4797 stderr.read()
4797 stderr.read()
4798
4798
4799 elif action == b'readline':
4799 elif action == b'readline':
4800 if not stdout:
4800 if not stdout:
4801 raise error.Abort(_(b'readline not available on this peer'))
4801 raise error.Abort(_(b'readline not available on this peer'))
4802 stdout.readline()
4802 stdout.readline()
4803 elif action == b'ereadline':
4803 elif action == b'ereadline':
4804 if not stderr:
4804 if not stderr:
4805 raise error.Abort(_(b'ereadline not available on this peer'))
4805 raise error.Abort(_(b'ereadline not available on this peer'))
4806 stderr.readline()
4806 stderr.readline()
4807 elif action.startswith(b'read '):
4807 elif action.startswith(b'read '):
4808 count = int(action.split(b' ', 1)[1])
4808 count = int(action.split(b' ', 1)[1])
4809 if not stdout:
4809 if not stdout:
4810 raise error.Abort(_(b'read not available on this peer'))
4810 raise error.Abort(_(b'read not available on this peer'))
4811 stdout.read(count)
4811 stdout.read(count)
4812 elif action.startswith(b'eread '):
4812 elif action.startswith(b'eread '):
4813 count = int(action.split(b' ', 1)[1])
4813 count = int(action.split(b' ', 1)[1])
4814 if not stderr:
4814 if not stderr:
4815 raise error.Abort(_(b'eread not available on this peer'))
4815 raise error.Abort(_(b'eread not available on this peer'))
4816 stderr.read(count)
4816 stderr.read(count)
4817 else:
4817 else:
4818 raise error.Abort(_(b'unknown action: %s') % action)
4818 raise error.Abort(_(b'unknown action: %s') % action)
4819
4819
4820 if batchedcommands is not None:
4820 if batchedcommands is not None:
4821 raise error.Abort(_(b'unclosed "batchbegin" request'))
4821 raise error.Abort(_(b'unclosed "batchbegin" request'))
4822
4822
4823 if peer:
4823 if peer:
4824 peer.close()
4824 peer.close()
4825
4825
4826 if proc:
4826 if proc:
4827 proc.kill()
4827 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now