# NOTE(review): the lines below were navigation residue from a Mercurial
# web code-browse page, preserved here as a comment for provenance:
# changeset r51847:1cfc49e3 (default), "debugindexdot: migrate `opts` to
# native kwargs", by Matt Harbison; diff header "@@ -1,4852 +1,4853".
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 open,
36 open,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 bundlerepo,
40 bundlerepo,
41 changegroup,
41 changegroup,
42 cmdutil,
42 cmdutil,
43 color,
43 color,
44 context,
44 context,
45 copies,
45 copies,
46 dagparser,
46 dagparser,
47 dirstateutils,
47 dirstateutils,
48 encoding,
48 encoding,
49 error,
49 error,
50 exchange,
50 exchange,
51 extensions,
51 extensions,
52 filelog,
52 filelog,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 manifest,
61 manifest,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 verify,
91 verify,
92 vfs as vfsmod,
92 vfs as vfsmod,
93 wireprotoframing,
93 wireprotoframing,
94 wireprotoserver,
94 wireprotoserver,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .stabletailgraph import stabletailsort
97 from .stabletailgraph import stabletailsort
98 from .utils import (
98 from .utils import (
99 cborutil,
99 cborutil,
100 compression,
100 compression,
101 dateutil,
101 dateutil,
102 procutil,
102 procutil,
103 stringutil,
103 stringutil,
104 urlutil,
104 urlutil,
105 )
105 )
106
106
107 from .revlogutils import (
107 from .revlogutils import (
108 constants as revlog_constants,
108 constants as revlog_constants,
109 debug as revlog_debug,
109 debug as revlog_debug,
110 deltas as deltautil,
110 deltas as deltautil,
111 nodemap,
111 nodemap,
112 rewrite,
112 rewrite,
113 sidedata,
113 sidedata,
114 )
114 )
115
115
# Re-export for the convenience of debug commands that take locks.
release = lockmod.release

# Command table for this module; seeded with the commands registered by
# the strip module so they share a single registration table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # explicit index file: open it as a standalone revlog
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # no index given: fall back to the changelog of the current repo
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
142
142
143
143
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes everywhere else in this file; use a single bytes
    # constant so the write and the cleanup cannot reference different names.
    fname = b'eicar-test-file.com'
    with repo.cachevfs.open(fname, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(fname))
159
159
160
160
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle (local path or URL), parse its header, and replay
    # its contents directly into the current repository.
    bundlefile = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, bundlefile, fname)
    bundle.apply(repo)
167
167
168
168
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to build on top of existing history unless explicitly allowed.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG: count node events ('n') so the progress bar
    # and the mergeable-file size can be computed up front.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create one commit per node event inside a
    # single transaction, holding both the working-dir and store locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1                    # rev id of the last node committed
        atbranch = b'default'      # branch applied to subsequent nodes
        nodeids = []               # node hash for each DAG id, by index
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data      # ps: parent ids (0, 1 or 2 entries)

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" is a shared file whose lines are merged with a
                    # three-way text merge on merge commits.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # stamp this revision's mark onto its dedicated line
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every revision
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one brand-new file per revision, named after its id
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry the second parent's nf* files through merges
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Map DAG parent ids to previously committed node hashes;
                # negative / missing ids mean a root commit.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: remember it, written out after the loop
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # branch event: affects all subsequent node commits
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
352
352
353
353
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """print the contents of changegroup 'gen', one entry per line

    With 'all' unset only the changelog node hashes are shown; with 'all'
    set, every delta of every section (changelog, manifest, filelogs) is
    listed with its parents, changeset and delta base.
    """
    prefix = b' ' * indent

    if not all:
        # terse mode: just the node id of each changelog entry
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (prefix, hex(node)))
        return

    ui.writenoi18n(
        b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" % prefix
    )

    def showchunks(named):
        # dump every delta of the current section under a heading
        ui.write(b"\n%s%s\n" % (prefix, named))
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            fields = (
                prefix,
                hex(node),
                hex(p1),
                hex(p2),
                hex(cs),
                hex(deltabase),
                len(delta),
            )
            ui.write(b"%s%s %s %s %s %s %d\n" % fields)

    gen.changelogheader()
    showchunks(b"changelog")
    gen.manifestheader()
    showchunks(b"manifest")
    # filelog sections repeat until an empty header terminates the stream
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata[b'filename'])
393
393
394
394
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Encoding newer than this client understands: report the raw
        # version and payload size instead of failing the whole dump.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Render each marker via the shared formatter so output matches
        # `hg debugobsolete`.
        fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
416
416
417
417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary blob 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
426
426
427
427
def _quasirepr(thing):
    """render 'thing' as bytes, with deterministic (sorted) dict ordering"""
    mappingtypes = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, mappingtypes):
        pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
434
434
435
435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only display the part types the user asked for
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # Known part types get their payload expanded (unless --quiet).
        if part.type == b'changegroup':
            # default to changegroup version '01' when the part omits it
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458
458
459
459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec; do not inspect the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482
482
483
483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    peer = hg.peer(ui, pycompat.byteskwargs(opts), path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        # bundle2 capabilities are advertised separately and nested
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # always release the peer connection, even on error
        peer.close()
502
502
503
503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute from the changectx rather than trusting stored data
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the precomputed files block from changelog sidedata, if any
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    # files stays None when no sidedata block exists; nothing is printed then
    if files is not None:
        for f in sorted(files.touched):
            # classify each touched file; order matters, first match wins
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # note which parent (if any) the file was copied from
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
553
553
554
554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # delegate the actual consistency check to the verifier machinery
    if verify.verifier(repo)._verify_dirstate():
        errstr = _(b"dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
562
562
563
563
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
576
576
577
577
def _debugdisplaycolor(ui):
    """print every color label the current configuration can render"""
    # work on a copy so the caller's style table is left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[len(b'color.'):]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    sortkey = lambda item: (b'_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
594
594
595
595
def _debugdisplaystyle(ui):
    """print each configured style label and the effects it expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad effect lists so they line up in one column
    width = max(map(len, ui._styles))
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
609
609
610
610
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.

    This command creates a "version 1" stream clone, which is deprecated in
    favor of newer versions of the stream protocol. Bundles using such newer
    versions can be generated using the `hg bundle` command.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    # Secret changesets are normally never exchanged; warn (but proceed),
    # since the bundle will contain them anyway.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # generatebundlev1 yields the requirements (reported below) and a chunk
    # generator; the chunks are streamed straight into `fname`.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
636
636
637
637
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Operate on an arbitrary revlog index file instead of the changelog.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Emit a ('n', (rev, parents)) node event per revision, plus an
            # ('l', (rev, "rN")) label event for each explicitly listed rev.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged changelog rev to its list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Emit ('a', branchname) annotations whenever --branches is set
            # and the branch changes, ('n', (rev, parents)) for every rev,
            # and ('l', (rev, tag)) labels when --tags is set.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # dagparser turns the event stream into the concise textual notation.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
707
707
708
708
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # With -c/-m/--dir the storage is implied, so the positional FILE
    # argument is actually the revision.
    storage_implied = any(
        opts.get(flag) for flag in ('changelog', 'manifest', 'dir')
    )
    if storage_implied:
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    storage = cmdutil.openstorage(
        repo, b'debugdata', file_, pycompat.byteskwargs(opts)
    )
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
727
728
728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With --extended, pass the extra format list; otherwise rely on the
    # parser's defaults.
    extra = (dateutil.extendeddateformats,) if opts["extended"] else ()
    parsed = dateutil.parsedate(date, *extra)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747
747
748
748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    r = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    # Memoizes the compressed size of the chain ending at each visited rev,
    # so revinfo() can stop walking as soon as it hits a cached ancestor.
    chain_size_cache = {}

    def revinfo(rev):
        # Gather per-revision statistics: parents, sizes, how the delta was
        # computed, and the full delta chain with its compressed size.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify the delta; the order of checks matters (e.g. a delta
        # against p1 must be reported as 'p1' even if p1 == rev - 1).
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without general delta, a revision is either a full snapshot
            # or a delta against the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        chain_size = 0
        # Walk the chain from this rev back toward its base, reusing any
        # previously computed chain size.
        for iter_rev in reversed(chain):
            cached = chain_size_cache.get(iter_rev)
            if cached is not None:
                chain_size += cached
                break
            e = index[iter_rev]
            chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        chain_size_cache[rev] = chain_size

        return p1, p2, compsize, uncompsize, deltatype, chain, chain_size

    fm = ui.formatter(b'debugdeltachain', pycompat.byteskwargs(opts))

    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains are numbered by unique base revision, in order of appearance.
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # On-disk span from the chain base to the end of this revision.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length 1: this revision has no predecessor.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the whole chain to report how much
            # data would actually be fetched from disk.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
997
997
998
998
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    # With a single positional argument, it is the revision and the storage
    # is implied by -c/-m; with two, the first is the file.
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2

    rev = int(rev)

    revlog = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    p1r, p2r = revlog.parentrevs(rev)

    # Map each --source value to the base revision the search starts from;
    # lambdas keep the lookups lazy, matching the original branch behavior.
    base_for_source = {
        b'full': lambda: nullrev,
        b'storage': lambda: revlog.deltaparent(rev),
        b'p1': lambda: p1r,
        b'p2': lambda: p2r,
        b'prev': lambda: rev - 1,
    }
    try:
        base_rev = base_for_source[source]()
    except KeyError:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
1057
1057
1058
1058
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 metadata file instead of the entries.
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --dates defaults to True; the deprecated --nodates flag (when given)
    # forces mtimes off regardless.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort primarily by saved mtime, using the filename to break ties.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # The timestamp column is padded to the width of the strftime output
        # below so the filename column stays aligned.
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit of the stored mode.
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1146
1146
1147
1147
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only the dirstate-v2 docket embeds a hash of the ignore patterns in
    # its tree metadata; the v1 format has no such field, so this command
    # prints nothing for v1 repositories.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The ignore-pattern hash is stored as the trailing bytes of the
        # tree metadata blob.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1162
1162
1163
1163
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is meant
      for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts["local_as_revs"]
    remote_revs = opts["remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts['seed']))

    if not remote_revs:
        # talk to a real (or configured) peer
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # simulate the remote side with a filtered view of the local repo
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # restrict the local side to the requested subset as well
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get('old'):
        # legacy tree-discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not hasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get('nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']

    fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
    if fm.strict_format:
        # machine-readable output must not be polluted by discovery chatter,
        # so buffer it and report it as a data field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1444
1444
1445
1445
# Size of each read/write chunk (4 KiB) used by `debugdownload` below, both
# for reading from the fetched resource and for buffering the output file.
_chunksize = 4 << 10
1447
1447
1448
1448
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is written to the file named by ``--output`` when given,
    otherwise to the ui.
    """
    fh = urlmod.open(ui, url, output)

    # Close the source handle in all cases; previously it leaked on both the
    # success and the error path (e.g. when opening the output file failed).
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # stream in fixed-size chunks to bound memory use
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        fh.close()
1471
1471
1472
1472
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts))
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if hasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) builds have no per-module files; report
            # the executable itself as the source location
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # flag extensions not (recently) tested with this hg version
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1533
1533
1534
1534
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize

    ctx = logcmdutil.revsingle(repo, opts.get('rev'), None)

    # the successive transformation stages a fileset expression goes through
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts['show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # omit the stage header when only the parsed tree is implied
            # by --verbose (legacy output format)
            if opts['show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1630
1630
1631
1631
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # Producing a report is mutually exclusive with consuming one (or with a
    # dry run of the repair itself).
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # Delegate the actual detection/repair work to the rewrite module,
    # forwarding each mode flag straight from the parsed options.
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1704
1704
1705
1705
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    # Column width: widest variant name, but never narrower than the header.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Left-align the variant name inside the fixed-width first column.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', pycompat.byteskwargs(opts))
    if fm.isplain():
        # Plain output renders booleans as yes/no; byte strings pass through.
        def formatvalue(value):
            if hasattr(value, 'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        formatvalue = pycompat.identity

    # Header row.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')

    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Highlight disagreements: repo vs config first, then repo vs default.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        configlabel = (
            b'formatvariant.config.special'
            if fv.default != configvalue
            else b'formatvariant.config.default'
        )
        # config/default columns only appear in verbose mode.
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1775
1775
1776
1776
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    # Probe each capability of the filesystem holding `path`.
    ui.writenoi18n(b'exec: %s\n' % (b'yes' if util.checkexec(path) else b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (b'yes' if util.checklink(path) else b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (b'yes' if util.checknlink(path) else b'no')
    )
    # Case sensitivity needs a scratch file; if we cannot create one
    # (e.g. read-only location), report it as unknown rather than failing.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = b'yes' if util.fscasesensitive(f.name) else b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1799
1799
1800
1800
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # Translate the hex IDs from the command line into binary nodes for the
    # wire-protocol call.
    getbundleargs = {}
    if common:
        getbundleargs['common'] = [bin(s) for s in common]
    if head:
        getbundleargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    getbundleargs['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **getbundleargs)

    # Map the user-facing compression name onto the on-disk bundle header.
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get('type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1846
1846
1847
1847
@command(b'debugignore', [], b'[FILE]...')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No arguments: just dump the compiled ignore matcher.
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    matcher = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in matcher.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                # The file itself matches an ignore rule.
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # Otherwise check whether any parent directory is ignored.
                for parent in pathutil.finddirs(nf):
                    if ignore(parent):
                        ignored = parent
                        ignoredata = repo.dirstate._ignorefileandline(parent)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        # Report which ignore file and line produced the match.
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1896
1896
1897
1897
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    # openstorage/formatter still expect bytes-keyed options; convert once.
    bopts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, bopts)
    fm = ui.formatter(b'debugindex', bopts)

    # Some stores wrap a revlog; unwrap when the attribute is present.
    revlog = getattr(store, '_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1919
1919
1920
1920
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    # `opts` arrives as native (str-keyed) kwargs; openstorage still expects
    # the historical bytes-keyed dict, so convert at the call boundary
    # instead of rebinding `opts` for the whole function.
    r = cmdutil.openstorage(
        repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
    )
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        # Always emit the first-parent edge; the second parent only exists
        # for merges (non-null), so emit that edge conditionally.
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")
1939
1940
1940
1941
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Touch the index so it is fully loaded before asking for stats.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # Only the native (C/Rust) index implementations expose stats().
    if not hasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1950
1951
1951
1952
1952 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1953 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1953 def debuginstall(ui, **opts):
1954 def debuginstall(ui, **opts):
1954 """test Mercurial installation
1955 """test Mercurial installation
1955
1956
1956 Returns 0 on success.
1957 Returns 0 on success.
1957 """
1958 """
1958 opts = pycompat.byteskwargs(opts)
1959 opts = pycompat.byteskwargs(opts)
1959
1960
1960 problems = 0
1961 problems = 0
1961
1962
1962 fm = ui.formatter(b'debuginstall', opts)
1963 fm = ui.formatter(b'debuginstall', opts)
1963 fm.startitem()
1964 fm.startitem()
1964
1965
1965 # encoding might be unknown or wrong. don't translate these messages.
1966 # encoding might be unknown or wrong. don't translate these messages.
1966 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1967 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1967 err = None
1968 err = None
1968 try:
1969 try:
1969 codecs.lookup(pycompat.sysstr(encoding.encoding))
1970 codecs.lookup(pycompat.sysstr(encoding.encoding))
1970 except LookupError as inst:
1971 except LookupError as inst:
1971 err = stringutil.forcebytestr(inst)
1972 err = stringutil.forcebytestr(inst)
1972 problems += 1
1973 problems += 1
1973 fm.condwrite(
1974 fm.condwrite(
1974 err,
1975 err,
1975 b'encodingerror',
1976 b'encodingerror',
1976 b" %s\n (check that your locale is properly set)\n",
1977 b" %s\n (check that your locale is properly set)\n",
1977 err,
1978 err,
1978 )
1979 )
1979
1980
1980 # Python
1981 # Python
1981 pythonlib = None
1982 pythonlib = None
1982 if hasattr(os, '__file__'):
1983 if hasattr(os, '__file__'):
1983 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1984 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1984 elif getattr(sys, 'oxidized', False):
1985 elif getattr(sys, 'oxidized', False):
1985 pythonlib = pycompat.sysexecutable
1986 pythonlib = pycompat.sysexecutable
1986
1987
1987 fm.write(
1988 fm.write(
1988 b'pythonexe',
1989 b'pythonexe',
1989 _(b"checking Python executable (%s)\n"),
1990 _(b"checking Python executable (%s)\n"),
1990 pycompat.sysexecutable or _(b"unknown"),
1991 pycompat.sysexecutable or _(b"unknown"),
1991 )
1992 )
1992 fm.write(
1993 fm.write(
1993 b'pythonimplementation',
1994 b'pythonimplementation',
1994 _(b"checking Python implementation (%s)\n"),
1995 _(b"checking Python implementation (%s)\n"),
1995 pycompat.sysbytes(platform.python_implementation()),
1996 pycompat.sysbytes(platform.python_implementation()),
1996 )
1997 )
1997 fm.write(
1998 fm.write(
1998 b'pythonver',
1999 b'pythonver',
1999 _(b"checking Python version (%s)\n"),
2000 _(b"checking Python version (%s)\n"),
2000 (b"%d.%d.%d" % sys.version_info[:3]),
2001 (b"%d.%d.%d" % sys.version_info[:3]),
2001 )
2002 )
2002 fm.write(
2003 fm.write(
2003 b'pythonlib',
2004 b'pythonlib',
2004 _(b"checking Python lib (%s)...\n"),
2005 _(b"checking Python lib (%s)...\n"),
2005 pythonlib or _(b"unknown"),
2006 pythonlib or _(b"unknown"),
2006 )
2007 )
2007
2008
2008 try:
2009 try:
2009 from . import rustext # pytype: disable=import-error
2010 from . import rustext # pytype: disable=import-error
2010
2011
2011 rustext.__doc__ # trigger lazy import
2012 rustext.__doc__ # trigger lazy import
2012 except ImportError:
2013 except ImportError:
2013 rustext = None
2014 rustext = None
2014
2015
2015 security = set(sslutil.supportedprotocols)
2016 security = set(sslutil.supportedprotocols)
2016 if sslutil.hassni:
2017 if sslutil.hassni:
2017 security.add(b'sni')
2018 security.add(b'sni')
2018
2019
2019 fm.write(
2020 fm.write(
2020 b'pythonsecurity',
2021 b'pythonsecurity',
2021 _(b"checking Python security support (%s)\n"),
2022 _(b"checking Python security support (%s)\n"),
2022 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2023 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2023 )
2024 )
2024
2025
2025 # These are warnings, not errors. So don't increment problem count. This
2026 # These are warnings, not errors. So don't increment problem count. This
2026 # may change in the future.
2027 # may change in the future.
2027 if b'tls1.2' not in security:
2028 if b'tls1.2' not in security:
2028 fm.plain(
2029 fm.plain(
2029 _(
2030 _(
2030 b' TLS 1.2 not supported by Python install; '
2031 b' TLS 1.2 not supported by Python install; '
2031 b'network connections lack modern security\n'
2032 b'network connections lack modern security\n'
2032 )
2033 )
2033 )
2034 )
2034 if b'sni' not in security:
2035 if b'sni' not in security:
2035 fm.plain(
2036 fm.plain(
2036 _(
2037 _(
2037 b' SNI not supported by Python install; may have '
2038 b' SNI not supported by Python install; may have '
2038 b'connectivity issues with some servers\n'
2039 b'connectivity issues with some servers\n'
2039 )
2040 )
2040 )
2041 )
2041
2042
2042 fm.plain(
2043 fm.plain(
2043 _(
2044 _(
2044 b"checking Rust extensions (%s)\n"
2045 b"checking Rust extensions (%s)\n"
2045 % (b'missing' if rustext is None else b'installed')
2046 % (b'missing' if rustext is None else b'installed')
2046 ),
2047 ),
2047 )
2048 )
2048
2049
2049 # TODO print CA cert info
2050 # TODO print CA cert info
2050
2051
2051 # hg version
2052 # hg version
2052 hgver = util.version()
2053 hgver = util.version()
2053 fm.write(
2054 fm.write(
2054 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2055 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2055 )
2056 )
2056 fm.write(
2057 fm.write(
2057 b'hgverextra',
2058 b'hgverextra',
2058 _(b"checking Mercurial custom build (%s)\n"),
2059 _(b"checking Mercurial custom build (%s)\n"),
2059 b'+'.join(hgver.split(b'+')[1:]),
2060 b'+'.join(hgver.split(b'+')[1:]),
2060 )
2061 )
2061
2062
2062 # compiled modules
2063 # compiled modules
2063 hgmodules = None
2064 hgmodules = None
2064 if hasattr(sys.modules[__name__], '__file__'):
2065 if hasattr(sys.modules[__name__], '__file__'):
2065 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2066 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2066 elif getattr(sys, 'oxidized', False):
2067 elif getattr(sys, 'oxidized', False):
2067 hgmodules = pycompat.sysexecutable
2068 hgmodules = pycompat.sysexecutable
2068
2069
2069 fm.write(
2070 fm.write(
2070 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2071 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2071 )
2072 )
2072 fm.write(
2073 fm.write(
2073 b'hgmodules',
2074 b'hgmodules',
2074 _(b"checking installed modules (%s)...\n"),
2075 _(b"checking installed modules (%s)...\n"),
2075 hgmodules or _(b"unknown"),
2076 hgmodules or _(b"unknown"),
2076 )
2077 )
2077
2078
2078 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2079 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2079 rustext = rustandc # for now, that's the only case
2080 rustext = rustandc # for now, that's the only case
2080 cext = policy.policy in (b'c', b'allow') or rustandc
2081 cext = policy.policy in (b'c', b'allow') or rustandc
2081 nopure = cext or rustext
2082 nopure = cext or rustext
2082 if nopure:
2083 if nopure:
2083 err = None
2084 err = None
2084 try:
2085 try:
2085 if cext:
2086 if cext:
2086 from .cext import ( # pytype: disable=import-error
2087 from .cext import ( # pytype: disable=import-error
2087 base85,
2088 base85,
2088 bdiff,
2089 bdiff,
2089 mpatch,
2090 mpatch,
2090 osutil,
2091 osutil,
2091 )
2092 )
2092
2093
2093 # quiet pyflakes
2094 # quiet pyflakes
2094 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2095 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2095 if rustext:
2096 if rustext:
2096 from .rustext import ( # pytype: disable=import-error
2097 from .rustext import ( # pytype: disable=import-error
2097 ancestor,
2098 ancestor,
2098 dirstate,
2099 dirstate,
2099 )
2100 )
2100
2101
2101 dir(ancestor), dir(dirstate) # quiet pyflakes
2102 dir(ancestor), dir(dirstate) # quiet pyflakes
2102 except Exception as inst:
2103 except Exception as inst:
2103 err = stringutil.forcebytestr(inst)
2104 err = stringutil.forcebytestr(inst)
2104 problems += 1
2105 problems += 1
2105 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2106 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2106
2107
2107 compengines = util.compengines._engines.values()
2108 compengines = util.compengines._engines.values()
2108 fm.write(
2109 fm.write(
2109 b'compengines',
2110 b'compengines',
2110 _(b'checking registered compression engines (%s)\n'),
2111 _(b'checking registered compression engines (%s)\n'),
2111 fm.formatlist(
2112 fm.formatlist(
2112 sorted(e.name() for e in compengines),
2113 sorted(e.name() for e in compengines),
2113 name=b'compengine',
2114 name=b'compengine',
2114 fmt=b'%s',
2115 fmt=b'%s',
2115 sep=b', ',
2116 sep=b', ',
2116 ),
2117 ),
2117 )
2118 )
2118 fm.write(
2119 fm.write(
2119 b'compenginesavail',
2120 b'compenginesavail',
2120 _(b'checking available compression engines (%s)\n'),
2121 _(b'checking available compression engines (%s)\n'),
2121 fm.formatlist(
2122 fm.formatlist(
2122 sorted(e.name() for e in compengines if e.available()),
2123 sorted(e.name() for e in compengines if e.available()),
2123 name=b'compengine',
2124 name=b'compengine',
2124 fmt=b'%s',
2125 fmt=b'%s',
2125 sep=b', ',
2126 sep=b', ',
2126 ),
2127 ),
2127 )
2128 )
2128 wirecompengines = compression.compengines.supportedwireengines(
2129 wirecompengines = compression.compengines.supportedwireengines(
2129 compression.SERVERROLE
2130 compression.SERVERROLE
2130 )
2131 )
2131 fm.write(
2132 fm.write(
2132 b'compenginesserver',
2133 b'compenginesserver',
2133 _(
2134 _(
2134 b'checking available compression engines '
2135 b'checking available compression engines '
2135 b'for wire protocol (%s)\n'
2136 b'for wire protocol (%s)\n'
2136 ),
2137 ),
2137 fm.formatlist(
2138 fm.formatlist(
2138 [e.name() for e in wirecompengines if e.wireprotosupport()],
2139 [e.name() for e in wirecompengines if e.wireprotosupport()],
2139 name=b'compengine',
2140 name=b'compengine',
2140 fmt=b'%s',
2141 fmt=b'%s',
2141 sep=b', ',
2142 sep=b', ',
2142 ),
2143 ),
2143 )
2144 )
2144 re2 = b'missing'
2145 re2 = b'missing'
2145 if util.has_re2():
2146 if util.has_re2():
2146 re2 = b'available'
2147 re2 = b'available'
2147 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2148 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2148 fm.data(re2=bool(util._re2))
2149 fm.data(re2=bool(util._re2))
2149
2150
2150 # templates
2151 # templates
2151 p = templater.templatedir()
2152 p = templater.templatedir()
2152 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2153 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2153 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2154 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2154 if p:
2155 if p:
2155 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2156 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2156 if m:
2157 if m:
2157 # template found, check if it is working
2158 # template found, check if it is working
2158 err = None
2159 err = None
2159 try:
2160 try:
2160 templater.templater.frommapfile(m)
2161 templater.templater.frommapfile(m)
2161 except Exception as inst:
2162 except Exception as inst:
2162 err = stringutil.forcebytestr(inst)
2163 err = stringutil.forcebytestr(inst)
2163 p = None
2164 p = None
2164 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2165 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2165 else:
2166 else:
2166 p = None
2167 p = None
2167 fm.condwrite(
2168 fm.condwrite(
2168 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2169 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2169 )
2170 )
2170 fm.condwrite(
2171 fm.condwrite(
2171 not m,
2172 not m,
2172 b'defaulttemplatenotfound',
2173 b'defaulttemplatenotfound',
2173 _(b" template '%s' not found\n"),
2174 _(b" template '%s' not found\n"),
2174 b"default",
2175 b"default",
2175 )
2176 )
2176 if not p:
2177 if not p:
2177 problems += 1
2178 problems += 1
2178 fm.condwrite(
2179 fm.condwrite(
2179 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2180 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2180 )
2181 )
2181
2182
2182 # editor
2183 # editor
2183 editor = ui.geteditor()
2184 editor = ui.geteditor()
2184 editor = util.expandpath(editor)
2185 editor = util.expandpath(editor)
2185 editorbin = procutil.shellsplit(editor)[0]
2186 editorbin = procutil.shellsplit(editor)[0]
2186 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2187 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2187 cmdpath = procutil.findexe(editorbin)
2188 cmdpath = procutil.findexe(editorbin)
2188 fm.condwrite(
2189 fm.condwrite(
2189 not cmdpath and editor == b'vi',
2190 not cmdpath and editor == b'vi',
2190 b'vinotfound',
2191 b'vinotfound',
2191 _(
2192 _(
2192 b" No commit editor set and can't find %s in PATH\n"
2193 b" No commit editor set and can't find %s in PATH\n"
2193 b" (specify a commit editor in your configuration"
2194 b" (specify a commit editor in your configuration"
2194 b" file)\n"
2195 b" file)\n"
2195 ),
2196 ),
2196 not cmdpath and editor == b'vi' and editorbin,
2197 not cmdpath and editor == b'vi' and editorbin,
2197 )
2198 )
2198 fm.condwrite(
2199 fm.condwrite(
2199 not cmdpath and editor != b'vi',
2200 not cmdpath and editor != b'vi',
2200 b'editornotfound',
2201 b'editornotfound',
2201 _(
2202 _(
2202 b" Can't find editor '%s' in PATH\n"
2203 b" Can't find editor '%s' in PATH\n"
2203 b" (specify a commit editor in your configuration"
2204 b" (specify a commit editor in your configuration"
2204 b" file)\n"
2205 b" file)\n"
2205 ),
2206 ),
2206 not cmdpath and editorbin,
2207 not cmdpath and editorbin,
2207 )
2208 )
2208 if not cmdpath and editor != b'vi':
2209 if not cmdpath and editor != b'vi':
2209 problems += 1
2210 problems += 1
2210
2211
2211 # check username
2212 # check username
2212 username = None
2213 username = None
2213 err = None
2214 err = None
2214 try:
2215 try:
2215 username = ui.username()
2216 username = ui.username()
2216 except error.Abort as e:
2217 except error.Abort as e:
2217 err = e.message
2218 err = e.message
2218 problems += 1
2219 problems += 1
2219
2220
2220 fm.condwrite(
2221 fm.condwrite(
2221 username, b'username', _(b"checking username (%s)\n"), username
2222 username, b'username', _(b"checking username (%s)\n"), username
2222 )
2223 )
2223 fm.condwrite(
2224 fm.condwrite(
2224 err,
2225 err,
2225 b'usernameerror',
2226 b'usernameerror',
2226 _(
2227 _(
2227 b"checking username...\n %s\n"
2228 b"checking username...\n %s\n"
2228 b" (specify a username in your configuration file)\n"
2229 b" (specify a username in your configuration file)\n"
2229 ),
2230 ),
2230 err,
2231 err,
2231 )
2232 )
2232
2233
2233 for name, mod in extensions.extensions():
2234 for name, mod in extensions.extensions():
2234 handler = getattr(mod, 'debuginstall', None)
2235 handler = getattr(mod, 'debuginstall', None)
2235 if handler is not None:
2236 if handler is not None:
2236 problems += handler(ui, fm)
2237 problems += handler(ui, fm)
2237
2238
2238 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2239 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2239 if not problems:
2240 if not problems:
2240 fm.data(problems=problems)
2241 fm.data(problems=problems)
2241 fm.condwrite(
2242 fm.condwrite(
2242 problems,
2243 problems,
2243 b'problems',
2244 b'problems',
2244 _(b"%d problems detected, please check your install!\n"),
2245 _(b"%d problems detected, please check your install!\n"),
2245 problems,
2246 problems,
2246 )
2247 )
2247 fm.end()
2248 fm.end()
2248
2249
2249 return problems
2250 return problems
2250
2251
2251
2252
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # One flag per requested id, in input order.
    known_flags = peer.known([bin(node_id) for node_id in ids])
    digits = b"".join(b"1" if known else b"0" for known in known_flags)
    ui.write(b"%s\n" % digits)
2265
2266
2266
2267
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # Kept only so pre-existing shell completion scripts keep working;
    # simply forward to the modern name-completion implementation.
    debugnamecomplete(ui, repo, *args)
2271
2272
2272
2273
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: unconditionally delete the lock file(s) without
    # checking who holds them (hence "DANGEROUS" in the option help).
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire the requested lock(s) non-blocking,
    # then hold them until the user confirms (interactive) or a signal
    # arrives; the finally clause guarantees release on every exit path.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # Default mode: report the state of both locks.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file; return 1 if held, 0 if free.

        ``vfs`` is the vfs the lock file lives in, ``name`` its file name,
        and ``method`` the repo method that acquires it.
        """
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We acquired it ourselves, so nobody else held it: release
            # immediately and fall through to report it as free.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                # Lock contents are "host:pid"; only show the host when the
                # lock is held from another machine.
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock file vanished between the failed acquire and the
                # stat: treat it as free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2395
2396
2396
2397
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache off the root manifest storage; not every
        # revlog implementation exposes one, hence the AttributeError guard.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    # --clear: wipe the cache (including its persisted form) under wlock.
    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    # --add NODE...: reading each manifest populates the cache as a side
    # effect; done under wlock so the cache write is serialized.
    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revisision in cache too
            return

    # Default mode: dump the cache contents and sizes.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2470
2471
2471
2472
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    # With --verbose, report which on-disk merge-state format is in effect
    # before printing the state itself.
    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    # Default template renders the commits, per-file state, and extras in a
    # human-readable layout; -T on the command line overrides it.
    if not opts[b'template']:
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local/other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # One entry per tracked file, with detail fields depending on whether
    # the record is a content conflict or a path (rename/delete) conflict.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level extras for files that are NOT in the merge state (those in
    # the merge state had their extras emitted above).
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2579
2580
2580
2581
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branch names get special treatment: historically only *open* branches
    # were listed, so the generic 'branches' namespace is skipped here and
    # open branches are collected explicitly below.
    for ns_name, ns in repo.names.items():
        if ns_name == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)
    # An empty argument list means "complete everything".
    prefixes = args if args else [b'']
    matches = {
        candidate
        for prefix in prefixes
        for candidate in candidates
        if candidate.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2603
2604
2604
2605
@command(
    b'debugnodemap',
    (
        cmdutil.debugrevlogopts
        + [
            (
                b'',
                b'dump-new',
                False,
                _(b'write a (new) persistent binary nodemap on stdout'),
            ),
            (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
            (
                b'',
                b'check',
                False,
                _(b'check that the data on disk data are correct.'),
            ),
            (
                b'',
                b'metadata',
                False,
                _(b'display the on disk meta data for the nodemap'),
            ),
        ]
    ),
    _(b'-c|-m|FILE'),
)
def debugnodemap(ui, repo, file_=None, **opts):
    """write and inspect on disk nodemap"""
    # A FILE argument and the -c/-m/--dir revlog selectors are mutually
    # exclusive; with neither given, default to the changelog.
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if file_ is not None:
            raise error.InputError(
                _(b'cannot specify a file with other arguments')
            )
    elif file_ is None:
        opts['changelog'] = True
    r = cmdutil.openstorage(
        repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
    )
    # Manifest and filelog wrappers delegate to an inner revlog; unwrap so
    # we can reach the index and nodemap data directly.
    if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
        r = r._revlog
    if opts['dump_new']:
        # Prefer the index's own serializer when available; otherwise fall
        # back to building the persistent data from the index in Python.
        if hasattr(r.index, "nodemap_data_all"):
            data = r.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(r.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, r.index, data)
    elif opts['metadata']:
        # Print the docket (metadata header) fields of the on-disk nodemap.
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2674
2675
2675
2676
2676 @command(
2677 @command(
2677 b'debugobsolete',
2678 b'debugobsolete',
2678 [
2679 [
2679 (b'', b'flags', 0, _(b'markers flag')),
2680 (b'', b'flags', 0, _(b'markers flag')),
2680 (
2681 (
2681 b'',
2682 b'',
2682 b'record-parents',
2683 b'record-parents',
2683 False,
2684 False,
2684 _(b'record parent information for the precursor'),
2685 _(b'record parent information for the precursor'),
2685 ),
2686 ),
2686 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2687 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2687 (
2688 (
2688 b'',
2689 b'',
2689 b'exclusive',
2690 b'exclusive',
2690 False,
2691 False,
2691 _(b'restrict display to markers only relevant to REV'),
2692 _(b'restrict display to markers only relevant to REV'),
2692 ),
2693 ),
2693 (b'', b'index', False, _(b'display index of the marker')),
2694 (b'', b'index', False, _(b'display index of the marker')),
2694 (b'', b'delete', [], _(b'delete markers specified by indices')),
2695 (b'', b'delete', [], _(b'delete markers specified by indices')),
2695 ]
2696 ]
2696 + cmdutil.commitopts2
2697 + cmdutil.commitopts2
2697 + cmdutil.formatteropts,
2698 + cmdutil.formatteropts,
2698 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2699 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2699 )
2700 )
2700 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    # Options still flow through byteskwargs here: every key used below
    # is a bytes literal (b'delete', b'rev', ...).
    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Turn a full hexadecimal node id into its binary form.  Raises
        # InputError for anything that is not a full-length hex node.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by numeric index, then return early.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        # Deleting markers rewrites the obsstore; doing that while a
        # transaction is open could be undone/corrupted by a rollback.
        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: PRECURSOR [SUCCESSOR ...] creates one marker.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Recording parents requires the precursor changeset to
                    # actually exist (possibly hidden), so look it up in the
                    # unfiltered repo.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                # obsstore.create() raises ValueError on malformed input;
                # surface it as a user-facing abort.
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Listing mode: optionally restricted to markers relevant to --rev.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # --index numbers markers by their position in the full store,
            # so iterate everything but only display the selected subset.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2825
2826
2826
2827
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""
    # Migrated to native kwargs: option *keys* are str, so look the
    # revision up with 'rev' instead of round-tripping the whole dict
    # through pycompat.byteskwargs().  Option *values* (the revision
    # string itself) are unchanged and still bytes.
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # One line per copy, "source -> destination".
    for dst, src in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2839
2840
2840
2841
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Migrated to native kwargs: option *keys* are str, so look the
    # revision up with 'rev' instead of round-tripping the whole dict
    # through pycompat.byteskwargs().  Option *values* stay bytes.
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # One line per copy, "source -> destination".
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2853
2854
2854
2855
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    # NOTE: this command already uses native (str-keyed) kwargs; see the
    # opts['full'] / opts['normal'] lookups below.

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, restricted to
        # dirstate entries whose state character is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative with '/' separators so it can be
        # compared against dirstate paths.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator and
                # offer the directory prefix instead of the whole file.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state characters from the
    # -n/-a/-r filters; empty means "no filter" (handled below).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # b'nmar' = all states, used when no filter option was given.
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2923
2924
2924
2925
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, build the file matcher against the first
    # one, then print each detected copy as "source -> destination" in
    # destination-sorted order.
    old_ctx = scmutil.revsingle(repo, rev1)
    new_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(old_ctx, pats, opts)
    copy_map = copies.pathcopies(old_ctx, new_ctx, matcher)
    for dst, src in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2938
2939
2939
2940
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer request logging on; the extra output still only shows
    # up when --debug is in effect.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    def _yesno(flag):
        # Localized yes/no used by the summary lines below.
        return _(b'yes') if flag else _(b'no')

    try:
        is_local = peer.local() is not None
        can_push = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % _yesno(is_local))
        ui.write(_(b'pushable: %s\n') % _yesno(can_push))
    finally:
        peer.close()
2963
2964
2964
2965
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # Options still use byteskwargs (byte keys) throughout this command.
    opts = pycompat.byteskwargs(opts)
    # --tool is applied through a config override so _picktool sees it
    # exactly as it would during a real merge.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # With -v, echo the tool-selection inputs that are in effect.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's warning chatter unless --debug is set.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3049
3050
3050
3051
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Five-argument form: attempt the compare-and-set.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # Shell convention: exit 0 on success (r truthy), 1 on failure.
            return not r
        else:
            # Two-argument form: list all keys in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        # Always release the peer connection, even on error.
        target.close()
3086
3087
3087
3088
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvec (parent vector) signatures of two revisions and
    # report depth, hamming delta, distance, and their relation symbol.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: previously, a pvec pair satisfying none of
        # the comparisons above left `rel` unbound, so the final
        # ui.write() crashed with UnboundLocalError.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3114
3115
3115
3116
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        # Rebuilding inside a transaction would let a rollback undo the
        # dirstate rewrite; forbid it outright.
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        # (Native kwargs: 'minimal' is looked up with a str key.)
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        with dirstate.changing_parents(repo):
            # changedfiles=None means "rebuild everything".
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3167
3168
3168
3169
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Migrated to native kwargs: look the flag up with a str key instead
    # of round-tripping the whole dict through pycompat.byteskwargs().
    repair.rebuildfncache(ui, repo, opts.get("only_data"))
3185
3186
3186
3187
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # Still byte-keyed: opts is forwarded to scmutil.match() below, which
    # this file feeds byteskwargs-style dicts.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() yields (source path, source filenode) or a falsy
        # value when the file was not renamed.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3206
3207
3207
3208
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in deterministic (sorted) order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3213
3214
3214
3215
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    # Still byte-keyed: the opts dict is forwarded to cmdutil.openrevlog().
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # -d/--dump: raw index data instead of statistics.
        revlog_debug.dump(ui, r)
    else:
        revlog_debug.debug_revlog(ui, r)
    return 0
3231
3232
3232
3233
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    fmt = opts.get(b'format', 0)
    if fmt not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % fmt)

    # --debug prints full hashes, otherwise the short 12-char form is used.
    shortfn = hex if ui.debugflag else short

    # The revlog may be empty, so default to the short-hash width; otherwise
    # size the node columns from the first entry.
    idlen = 12
    for rev in rl:
        idlen = len(shortfn(rl.node(rev)))
        break

    # Emit the column header matching the selected format/verbosity.
    if fmt == 0:
        if ui.verbose:
            header = b" rev offset length linkrev %s %s p2\n" % (
                b"nodeid".ljust(idlen),
                b"p1".ljust(idlen),
            )
        else:
            header = b" rev linkrev %s %s p2\n" % (
                b"nodeid".ljust(idlen),
                b"p1".ljust(idlen),
            )
        ui.writenoi18n(header)
    elif fmt == 1:
        if ui.verbose:
            header = (
                b" rev flag offset length size link p1"
                b" p2 %s\n"
            ) % b"nodeid".rjust(idlen)
        else:
            header = b" rev flag size link p1 p2 %s\n" % b"nodeid".rjust(
                idlen
            )
        ui.writenoi18n(header)

    for rev in rl:
        node = rl.node(rev)
        if fmt == 0:
            try:
                pp = rl.parents(node)
            except Exception:
                # Fall back to null parents if the lookup blows up; this is
                # a debug command, so keep dumping whatever we can.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        rev,
                        rl.start(rev),
                        rl.length(rev),
                        rl.linkrev(rev),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        rev,
                        rl.linkrev(rev),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif fmt == 1:
            pr = rl.parentrevs(rev)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        rl.flags(rev),
                        rl.start(rev),
                        rl.length(rev),
                        rl.rawsize(rev),
                        rl.linkrev(rev),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        rl.flags(rev),
                        rl.rawsize(rev),
                        rl.linkrev(rev),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3346
3347
3347
3348
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered pipeline of (name, transform) stages; each stage consumes the
    # tree produced by the previous one. Order matters and later code looks
    # stages up by name in treebystage.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage so the analyzed tree is evaluated.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages printed unconditionally vs. only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering each intermediate tree and printing the
    # requested ones. printedtree suppresses duplicate output for stages
    # that did not change the tree.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the
        # resulting revision sequences; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render a unified-diff-style listing of the mismatching revisions.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print set/revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3479
3480
3480
3481
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    # opts is only consumed locally, so use native (str-keyed) kwargs
    # directly instead of round-tripping through pycompat.byteskwargs(),
    # in line with the ongoing opts migration in this module.
    if not opts['sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts['logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), 'wb', 0)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], b'ab', 0)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3529
3530
3530
3531
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and touches
    nothing else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions up front; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parent pointers are rewritten; no working copy
    # update or file status refresh happens.
    with repo.wlock():
        repo.setparents(node1, node2)
3558
3559
3559
3560
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied by the flag, so the single
    # positional argument is actually the revision.
    if any(opts.get(k) for k in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Unwrap to the underlying revlog when the storage object has one.
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3588
3589
3589
3590
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # The module-level ssl.wrap_socket() was deprecated in Python 3.7 and
    # removed in 3.12; build an explicit SSLContext instead. Verification is
    # deliberately disabled: the goal is to fetch whatever certificate the
    # peer presents, not to trust it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # Fetch the peer certificate in DER form for the Windows chain check.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3659
3660
3660
3661
@command(
    b'debug::stable-tail-sort',
    [
        (
            b'T',
            b'template',
            b'{rev}\n',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
    ],
    b'REV',
)
def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
    """display the stable-tail sort of the ancestors of a given node"""
    # Resolve the head revision, then emit every ancestor in stable-tail
    # order through the user-supplied template.
    head_rev = logcmdutil.revsingle(repo, revspec).rev()
    displayer = logcmdutil.maketemplater(ui, repo, template)
    for ancestor_rev in stabletailsort._stable_tail_sort_naive(
        repo.changelog, head_rev
    ):
        displayer.show(repo[ancestor_rev])
3683
3684
3684
3685
@command(
    b'debug::stable-tail-sort-leaps',
    [
        (
            b'T',
            b'template',
            b'{rev}',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
        (b's', b'specific', False, _(b'restrict to specific leaps')),
    ],
    b'REV',
)
def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
    """display the leaps in the stable-tail sort of a node, one per line"""
    head_rev = logcmdutil.revsingle(repo, rspec).rev()

    # --specific narrows the enumeration to specific leaps only.
    leap_finder = (
        stabletailsort._find_specific_leaps_naive
        if specific
        else stabletailsort._find_all_leaps_naive
    )

    displayer = logcmdutil.maketemplater(ui, repo, template)
    for leap_source, leap_target in leap_finder(repo.changelog, head_rev):
        displayer.show(repo[leap_source])
        displayer.show(repo[leap_target])
        ui.write(b'\n')
3713
3714
3714
3715
3715 @command(
3716 @command(
3716 b"debugbackupbundle",
3717 b"debugbackupbundle",
3717 [
3718 [
3718 (
3719 (
3719 b"",
3720 b"",
3720 b"recover",
3721 b"recover",
3721 b"",
3722 b"",
3722 b"brings the specified changeset back into the repository",
3723 b"brings the specified changeset back into the repository",
3723 )
3724 )
3724 ]
3725 ]
3725 + cmdutil.logopts,
3726 + cmdutil.logopts,
3726 _(b"hg debugbackupbundle [--recover HASH]"),
3727 _(b"hg debugbackupbundle [--recover HASH]"),
3727 )
3728 )
3728 def debugbackupbundle(ui, repo, *pats, **opts):
3729 def debugbackupbundle(ui, repo, *pats, **opts):
3729 """lists the changesets available in backup bundles
3730 """lists the changesets available in backup bundles
3730
3731
3731 Without any arguments, this command prints a list of the changesets in each
3732 Without any arguments, this command prints a list of the changesets in each
3732 backup bundle.
3733 backup bundle.
3733
3734
3734 --recover takes a changeset hash and unbundles the first bundle that
3735 --recover takes a changeset hash and unbundles the first bundle that
3735 contains that hash, which puts that changeset back in your repository.
3736 contains that hash, which puts that changeset back in your repository.
3736
3737
3737 --verbose will print the entire commit message and the bundle path for that
3738 --verbose will print the entire commit message and the bundle path for that
3738 backup.
3739 backup.
3739 """
3740 """
3740 backups = list(
3741 backups = list(
3741 filter(
3742 filter(
3742 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3743 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3743 )
3744 )
3744 )
3745 )
3745 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3746 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3746
3747
3747 opts = pycompat.byteskwargs(opts)
3748 opts = pycompat.byteskwargs(opts)
3748 opts[b"bundle"] = b""
3749 opts[b"bundle"] = b""
3749 opts[b"force"] = None
3750 opts[b"force"] = None
3750 limit = logcmdutil.getlimit(opts)
3751 limit = logcmdutil.getlimit(opts)
3751
3752
3752 def display(other, chlist, displayer):
3753 def display(other, chlist, displayer):
3753 if opts.get(b"newest_first"):
3754 if opts.get(b"newest_first"):
3754 chlist.reverse()
3755 chlist.reverse()
3755 count = 0
3756 count = 0
3756 for n in chlist:
3757 for n in chlist:
3757 if limit is not None and count >= limit:
3758 if limit is not None and count >= limit:
3758 break
3759 break
3759 parents = [
3760 parents = [
3760 True for p in other.changelog.parents(n) if p != repo.nullid
3761 True for p in other.changelog.parents(n) if p != repo.nullid
3761 ]
3762 ]
3762 if opts.get(b"no_merges") and len(parents) == 2:
3763 if opts.get(b"no_merges") and len(parents) == 2:
3763 continue
3764 continue
3764 count += 1
3765 count += 1
3765 displayer.show(other[n])
3766 displayer.show(other[n])
3766
3767
3767 recovernode = opts.get(b"recover")
3768 recovernode = opts.get(b"recover")
3768 if recovernode:
3769 if recovernode:
3769 if scmutil.isrevsymbol(repo, recovernode):
3770 if scmutil.isrevsymbol(repo, recovernode):
3770 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3771 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3771 return
3772 return
3772 elif backups:
3773 elif backups:
3773 msg = _(
3774 msg = _(
3774 b"Recover changesets using: hg debugbackupbundle --recover "
3775 b"Recover changesets using: hg debugbackupbundle --recover "
3775 b"<changeset hash>\n\nAvailable backup changesets:"
3776 b"<changeset hash>\n\nAvailable backup changesets:"
3776 )
3777 )
3777 ui.status(msg, label=b"status.removed")
3778 ui.status(msg, label=b"status.removed")
3778 else:
3779 else:
3779 ui.status(_(b"no backup changesets found\n"))
3780 ui.status(_(b"no backup changesets found\n"))
3780 return
3781 return
3781
3782
3782 for backup in backups:
3783 for backup in backups:
3783 # Much of this is copied from the hg incoming logic
3784 # Much of this is copied from the hg incoming logic
3784 source = os.path.relpath(backup, encoding.getcwd())
3785 source = os.path.relpath(backup, encoding.getcwd())
3785 path = urlutil.get_unique_pull_path_obj(
3786 path = urlutil.get_unique_pull_path_obj(
3786 b'debugbackupbundle',
3787 b'debugbackupbundle',
3787 ui,
3788 ui,
3788 source,
3789 source,
3789 )
3790 )
3790 try:
3791 try:
3791 other = hg.peer(repo, opts, path)
3792 other = hg.peer(repo, opts, path)
3792 except error.LookupError as ex:
3793 except error.LookupError as ex:
3793 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3794 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3794 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3795 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3795 ui.warn(msg, hint=hint)
3796 ui.warn(msg, hint=hint)
3796 continue
3797 continue
3797 branches = (path.branch, opts.get(b'branch', []))
3798 branches = (path.branch, opts.get(b'branch', []))
3798 revs, checkout = hg.addbranchrevs(
3799 revs, checkout = hg.addbranchrevs(
3799 repo, other, branches, opts.get(b"rev")
3800 repo, other, branches, opts.get(b"rev")
3800 )
3801 )
3801
3802
3802 if revs:
3803 if revs:
3803 revs = [other.lookup(rev) for rev in revs]
3804 revs = [other.lookup(rev) for rev in revs]
3804
3805
3805 with ui.silent():
3806 with ui.silent():
3806 try:
3807 try:
3807 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3808 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3808 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3809 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3809 )
3810 )
3810 except error.LookupError:
3811 except error.LookupError:
3811 continue
3812 continue
3812
3813
3813 try:
3814 try:
3814 if not chlist:
3815 if not chlist:
3815 continue
3816 continue
3816 if recovernode:
3817 if recovernode:
3817 with repo.lock(), repo.transaction(b"unbundle") as tr:
3818 with repo.lock(), repo.transaction(b"unbundle") as tr:
3818 if scmutil.isrevsymbol(other, recovernode):
3819 if scmutil.isrevsymbol(other, recovernode):
3819 ui.status(_(b"Unbundling %s\n") % (recovernode))
3820 ui.status(_(b"Unbundling %s\n") % (recovernode))
3820 f = hg.openpath(ui, path.loc)
3821 f = hg.openpath(ui, path.loc)
3821 gen = exchange.readbundle(ui, f, path.loc)
3822 gen = exchange.readbundle(ui, f, path.loc)
3822 if isinstance(gen, bundle2.unbundle20):
3823 if isinstance(gen, bundle2.unbundle20):
3823 bundle2.applybundle(
3824 bundle2.applybundle(
3824 repo,
3825 repo,
3825 gen,
3826 gen,
3826 tr,
3827 tr,
3827 source=b"unbundle",
3828 source=b"unbundle",
3828 url=b"bundle:" + path.loc,
3829 url=b"bundle:" + path.loc,
3829 )
3830 )
3830 else:
3831 else:
3831 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3832 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3832 break
3833 break
3833 else:
3834 else:
3834 backupdate = encoding.strtolocal(
3835 backupdate = encoding.strtolocal(
3835 time.strftime(
3836 time.strftime(
3836 "%a %H:%M, %Y-%m-%d",
3837 "%a %H:%M, %Y-%m-%d",
3837 time.localtime(os.path.getmtime(path.loc)),
3838 time.localtime(os.path.getmtime(path.loc)),
3838 )
3839 )
3839 )
3840 )
3840 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3841 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3841 if ui.verbose:
3842 if ui.verbose:
3842 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3843 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3843 else:
3844 else:
3844 opts[
3845 opts[
3845 b"template"
3846 b"template"
3846 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3847 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3847 displayer = logcmdutil.changesetdisplayer(
3848 displayer = logcmdutil.changesetdisplayer(
3848 ui, other, opts, False
3849 ui, other, opts, False
3849 )
3850 )
3850 display(other, chlist, displayer)
3851 display(other, chlist, displayer)
3851 displayer.close()
3852 displayer.close()
3852 finally:
3853 finally:
3853 cleanupfn()
3854 cleanupfn()
3854
3855
3855
3856
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state of the requested revision (working-copy
    # parent when no revision is given).  Deliberately no docstring: a
    # docstring here would become the command's user-visible help text.
    ctx = scmutil.revsingle(repo, rev, None)
    for subpath, substate in sorted(ctx.substate.items()):
        # substate[0]/substate[1] are the recorded source and revision.
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % substate[0])
        ui.writenoi18n(b' revision %s\n' % substate[1])
3867
3868
3868
3869
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Names exposed inside the interpreter / the -c program.
    shell_locals = {
        'ui': ui,
        'repo': repo,
    }

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter. Manually initialize
    # the stuff that site normally does here, so that the interpreter can be
    # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
    # py.exe, or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    command = opts.get('command')
    if command:
        # Non-interactive mode: compile and execute the supplied program,
        # then return without entering the REPL.
        compiled = code.compile_command(encoding.strfromlocal(command))
        code.InteractiveInterpreter(locals=shell_locals).runcode(compiled)
        return

    code.interact(local=shell_locals)
3918
3919
3919
3920
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    opts = pycompat.byteskwargs(opts)
    changelog = opts[b"changelog"]
    manifest = opts[b"manifest"]
    filelogs = opts[b"filelogs"]

    # None means the flag was not given at all; with no explicit selection,
    # report on every revlog category.
    if all(flag is None for flag in (changelog, manifest, filelogs)):
        changelog = manifest = filelogs = True

    # Statistics must cover hidden revisions too, hence the unfiltered view.
    repo = repo.unfiltered()
    fm = ui.formatter(b'debug-revlog-stats', opts)
    revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
    fm.end()
3945
3946
3946
3947
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Cache shared across successorssets() calls to avoid recomputation
    # from one revision to the next.
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for sset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if sset:
                # NOTE(review): the leading literal below may originally
                # contain more spaces (indentation in the output example
                # above suggests so) -- verify against upstream.
                ui.write(b'    ')
                ui.write(node2str(sset[0]))
                for succ in sset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(succ))
            ui.write(b'\n')
4001
4002
4002
4003
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # Only read what is already cached; never trigger a recomputation.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
            # Flag cached entries that the .hgtags filelog no longer knows.
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4021
4022
4022
4023
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """

    def showsymbols(kwds, funcs):
        # Verbose-mode dump of the keywords/functions the template uses.
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions; 'ui' is reserved and an empty key
    # is nonsensical, both are rejected as malformed.
    props = {}
    for spec in opts['define']:
        try:
            key, val = (part.strip() for part in spec.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = val
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % spec)

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with no changeset context.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(*t.symbolsuseddefault())
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(*displayer.t.symbolsuseddefault())
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
4086
4087
4087
4088
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    answer = ui.getpass(prompt)
    # ui.getpass() can return None; substitute a visible marker so the
    # response line below is always printable.
    if answer is None:
        answer = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % answer)
4102
4103
4103
4104
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the prompt machinery returned so tests can observe it.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
4116
4117
4117
4118
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock for the whole
    # cache rewrite.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4123
4124
4124
4125
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # The command dispatcher passes a list for --optimize (the option's
    # default is []), but the function's declared default is None, which
    # previously crashed in set(None) when the function was called directly.
    # Guard so the documented default is actually usable.
    optimizations = set(optimize) if optimize else set()
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4174
4175
4175
4176
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    matched = list(repo[None].walk(m))
    if not matched:
        return
    # Optionally convert OS-native separators to '/' for display.
    normalize = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        normalize = lambda fn: util.normpath(fn)
    # Column widths are sized to the longest repo-relative / cwd-relative
    # names.  NOTE(review): the spacing inside this literal may have been
    # collapsed in transit; verify column separators against upstream.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(name) for name in matched),
        max(len(repo.pathto(name)) for name in matched),
    )
    for name in matched:
        line = fmt % (
            name,
            normalize(repo.pathto(name)),
            b'exact' if m.exact(name) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4202
4203
4203
4204
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)", space
            # joined, with a trailing space to separate from the reason.
            rendered = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4221
4222
4222
4223
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the peer's debugwireargs wire command with the given
    # positional and keyword arguments.  No docstring on purpose: it would
    # become the command's user-visible help text.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # The remote-connection options were consumed by hg.peer() above
        # and must not be forwarded over the wire.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Forward only the options that were actually set.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = repo.debugwireargs(*vals, **args)
        second = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        repo.close()
4253
4254
4254
4255
4255 def _parsewirelangblocks(fh):
4256 def _parsewirelangblocks(fh):
4256 activeaction = None
4257 activeaction = None
4257 blocklines = []
4258 blocklines = []
4258 lastindent = 0
4259 lastindent = 0
4259
4260
4260 for line in fh:
4261 for line in fh:
4261 line = line.rstrip()
4262 line = line.rstrip()
4262 if not line:
4263 if not line:
4263 continue
4264 continue
4264
4265
4265 if line.startswith(b'#'):
4266 if line.startswith(b'#'):
4266 continue
4267 continue
4267
4268
4268 if not line.startswith(b' '):
4269 if not line.startswith(b' '):
4269 # New block. Flush previous one.
4270 # New block. Flush previous one.
4270 if activeaction:
4271 if activeaction:
4271 yield activeaction, blocklines
4272 yield activeaction, blocklines
4272
4273
4273 activeaction = line
4274 activeaction = line
4274 blocklines = []
4275 blocklines = []
4275 lastindent = 0
4276 lastindent = 0
4276 continue
4277 continue
4277
4278
4278 # Else we start with an indent.
4279 # Else we start with an indent.
4279
4280
4280 if not activeaction:
4281 if not activeaction:
4281 raise error.Abort(_(b'indented line outside of block'))
4282 raise error.Abort(_(b'indented line outside of block'))
4282
4283
4283 indent = len(line) - len(line.lstrip())
4284 indent = len(line) - len(line.lstrip())
4284
4285
4285 # If this line is indented more than the last line, concatenate it.
4286 # If this line is indented more than the last line, concatenate it.
4286 if indent > lastindent and blocklines:
4287 if indent > lastindent and blocklines:
4287 blocklines[-1] += line.lstrip()
4288 blocklines[-1] += line.lstrip()
4288 else:
4289 else:
4289 blocklines.append(line)
4290 blocklines.append(line)
4290 lastindent = indent
4291 lastindent = indent
4291
4292
4292 # Flush last block.
4293 # Flush last block.
4293 if activeaction:
4294 if activeaction:
4294 yield activeaction, blocklines
4295 yield activeaction, blocklines
4295
4296
4296
4297
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``ssh1``. ``raw`` instances only allow sending raw data payloads and
    don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

      command listkeys
          namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'ssh1',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw" and "ssh1"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer._make_peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = urlutil.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer_path = urlutil.try_path(ui, path)
            peer = httppeer._make_peer(ui, peer_path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                ui.status(
                    _(b'response: %s\n')
                    % stringutil.pprint(res, bprefix=True, indent=2)
                )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # BUGFIX: split() returns a list, so the previous code
                    # passed [b'BODYFILE', path] as the filename (and a bytes
                    # mode, which Python 3's open() rejects). Take the path
                    # component and use a native-str mode, mirroring the
                    # PUSHFILE handling above.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now