cli: make debugnodemap capable of inspecting an arbitrary nodemap...
Arseniy Alekseyev
r51402:1b73868d default
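
This changeset teaches hg debugnodemap to inspect the persistent nodemap of an arbitrary revlog (for example the manifest or a filelog) rather than only the changelog's, which is presumably why the hunk below adds the filelog and manifest imports. A rough usage sketch, assuming the command picks up the standard revlog-selection options (-c / -m / FILE) used by the other debug commands; the exact flags are an assumption, not confirmed by this excerpt:

    hg debugnodemap --metadata        # changelog nodemap (previous behaviour)
    hg debugnodemap -m --metadata     # assumed: inspect the manifest's nodemap
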
@@ -1,4808 +1,4816 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 getattr,
36 getattr,
37 open,
37 open,
38 )
38 )
39 from . import (
39 from . import (
40 bundle2,
40 bundle2,
41 bundlerepo,
41 bundlerepo,
42 changegroup,
42 changegroup,
43 cmdutil,
43 cmdutil,
44 color,
44 color,
45 context,
45 context,
46 copies,
46 copies,
47 dagparser,
47 dagparser,
48 dirstateutils,
48 dirstateutils,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filelog,
53 filemerge,
54 filemerge,
54 filesetlang,
55 filesetlang,
55 formatter,
56 formatter,
56 hg,
57 hg,
57 httppeer,
58 httppeer,
58 localrepo,
59 localrepo,
59 lock as lockmod,
60 lock as lockmod,
60 logcmdutil,
61 logcmdutil,
62 manifest,
61 mergestate as mergestatemod,
63 mergestate as mergestatemod,
62 metadata,
64 metadata,
63 obsolete,
65 obsolete,
64 obsutil,
66 obsutil,
65 pathutil,
67 pathutil,
66 phases,
68 phases,
67 policy,
69 policy,
68 pvec,
70 pvec,
69 pycompat,
71 pycompat,
70 registrar,
72 registrar,
71 repair,
73 repair,
72 repoview,
74 repoview,
73 requirements,
75 requirements,
74 revlog,
76 revlog,
75 revset,
77 revset,
76 revsetlang,
78 revsetlang,
77 scmutil,
79 scmutil,
78 setdiscovery,
80 setdiscovery,
79 simplemerge,
81 simplemerge,
80 sshpeer,
82 sshpeer,
81 sslutil,
83 sslutil,
82 streamclone,
84 streamclone,
83 strip,
85 strip,
84 tags as tagsmod,
86 tags as tagsmod,
85 templater,
87 templater,
86 treediscovery,
88 treediscovery,
87 upgrade,
89 upgrade,
88 url as urlmod,
90 url as urlmod,
89 util,
91 util,
90 verify,
92 verify,
91 vfs as vfsmod,
93 vfs as vfsmod,
92 wireprotoframing,
94 wireprotoframing,
93 wireprotoserver,
95 wireprotoserver,
94 )
96 )
95 from .interfaces import repository
97 from .interfaces import repository
96 from .stabletailgraph import stabletailsort
98 from .stabletailgraph import stabletailsort
97 from .utils import (
99 from .utils import (
98 cborutil,
100 cborutil,
99 compression,
101 compression,
100 dateutil,
102 dateutil,
101 procutil,
103 procutil,
102 stringutil,
104 stringutil,
103 urlutil,
105 urlutil,
104 )
106 )
105
107
106 from .revlogutils import (
108 from .revlogutils import (
107 constants as revlog_constants,
109 constants as revlog_constants,
108 debug as revlog_debug,
110 debug as revlog_debug,
109 deltas as deltautil,
111 deltas as deltautil,
110 nodemap,
112 nodemap,
111 rewrite,
113 rewrite,
112 sidedata,
114 sidedata,
113 )
115 )
114
116
115 release = lockmod.release
117 release = lockmod.release
116
118
117 table = {}
119 table = {}
118 table.update(strip.command._table)
120 table.update(strip.command._table)
119 command = registrar.command(table)
121 command = registrar.command(table)
120
122
121
123
122 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
124 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
123 def debugancestor(ui, repo, *args):
125 def debugancestor(ui, repo, *args):
124 """find the ancestor revision of two revisions in a given index"""
126 """find the ancestor revision of two revisions in a given index"""
125 if len(args) == 3:
127 if len(args) == 3:
126 index, rev1, rev2 = args
128 index, rev1, rev2 = args
127 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
129 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
128 lookup = r.lookup
130 lookup = r.lookup
129 elif len(args) == 2:
131 elif len(args) == 2:
130 if not repo:
132 if not repo:
131 raise error.Abort(
133 raise error.Abort(
132 _(b'there is no Mercurial repository here (.hg not found)')
134 _(b'there is no Mercurial repository here (.hg not found)')
133 )
135 )
134 rev1, rev2 = args
136 rev1, rev2 = args
135 r = repo.changelog
137 r = repo.changelog
136 lookup = repo.lookup
138 lookup = repo.lookup
137 else:
139 else:
138 raise error.Abort(_(b'either two or three arguments required'))
140 raise error.Abort(_(b'either two or three arguments required'))
139 a = r.ancestor(lookup(rev1), lookup(rev2))
141 a = r.ancestor(lookup(rev1), lookup(rev2))
140 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
142 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
141
143
142
144
143 @command(b'debugantivirusrunning', [])
145 @command(b'debugantivirusrunning', [])
144 def debugantivirusrunning(ui, repo):
146 def debugantivirusrunning(ui, repo):
145 """attempt to trigger an antivirus scanner to see if one is active"""
147 """attempt to trigger an antivirus scanner to see if one is active"""
146 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
148 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
147 f.write(
149 f.write(
148 util.b85decode(
150 util.b85decode(
149 # This is a base85-armored version of the EICAR test file. See
151 # This is a base85-armored version of the EICAR test file. See
150 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
152 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
151 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
153 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
152 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
154 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
153 )
155 )
154 )
156 )
155 # Give an AV engine time to scan the file.
157 # Give an AV engine time to scan the file.
156 time.sleep(2)
158 time.sleep(2)
157 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
159 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158
160
159
161
160 @command(b'debugapplystreamclonebundle', [], b'FILE')
162 @command(b'debugapplystreamclonebundle', [], b'FILE')
161 def debugapplystreamclonebundle(ui, repo, fname):
163 def debugapplystreamclonebundle(ui, repo, fname):
162 """apply a stream clone bundle file"""
164 """apply a stream clone bundle file"""
163 f = hg.openpath(ui, fname)
165 f = hg.openpath(ui, fname)
164 gen = exchange.readbundle(ui, f, fname)
166 gen = exchange.readbundle(ui, f, fname)
165 gen.apply(repo)
167 gen.apply(repo)
166
168
167
169
168 @command(
170 @command(
169 b'debugbuilddag',
171 b'debugbuilddag',
170 [
172 [
171 (
173 (
172 b'm',
174 b'm',
173 b'mergeable-file',
175 b'mergeable-file',
174 None,
176 None,
175 _(b'add single file mergeable changes'),
177 _(b'add single file mergeable changes'),
176 ),
178 ),
177 (
179 (
178 b'o',
180 b'o',
179 b'overwritten-file',
181 b'overwritten-file',
180 None,
182 None,
181 _(b'add single file all revs overwrite'),
183 _(b'add single file all revs overwrite'),
182 ),
184 ),
183 (b'n', b'new-file', None, _(b'add new file at each rev')),
185 (b'n', b'new-file', None, _(b'add new file at each rev')),
184 (
186 (
185 b'',
187 b'',
186 b'from-existing',
188 b'from-existing',
187 None,
189 None,
188 _(b'continue from a non-empty repository'),
190 _(b'continue from a non-empty repository'),
189 ),
191 ),
190 ],
192 ],
191 _(b'[OPTION]... [TEXT]'),
193 _(b'[OPTION]... [TEXT]'),
192 )
194 )
193 def debugbuilddag(
195 def debugbuilddag(
194 ui,
196 ui,
195 repo,
197 repo,
196 text=None,
198 text=None,
197 mergeable_file=False,
199 mergeable_file=False,
198 overwritten_file=False,
200 overwritten_file=False,
199 new_file=False,
201 new_file=False,
200 from_existing=False,
202 from_existing=False,
201 ):
203 ):
202 """builds a repo with a given DAG from scratch in the current empty repo
204 """builds a repo with a given DAG from scratch in the current empty repo
203
205
204 The description of the DAG is read from stdin if not given on the
206 The description of the DAG is read from stdin if not given on the
205 command line.
207 command line.
206
208
207 Elements:
209 Elements:
208
210
209 - "+n" is a linear run of n nodes based on the current default parent
211 - "+n" is a linear run of n nodes based on the current default parent
210 - "." is a single node based on the current default parent
212 - "." is a single node based on the current default parent
211 - "$" resets the default parent to null (implied at the start);
213 - "$" resets the default parent to null (implied at the start);
212 otherwise the default parent is always the last node created
214 otherwise the default parent is always the last node created
213 - "<p" sets the default parent to the backref p
215 - "<p" sets the default parent to the backref p
214 - "*p" is a fork at parent p, which is a backref
216 - "*p" is a fork at parent p, which is a backref
215 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
217 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
216 - "/p2" is a merge of the preceding node and p2
218 - "/p2" is a merge of the preceding node and p2
217 - ":tag" defines a local tag for the preceding node
219 - ":tag" defines a local tag for the preceding node
218 - "@branch" sets the named branch for subsequent nodes
220 - "@branch" sets the named branch for subsequent nodes
219 - "#...\\n" is a comment up to the end of the line
221 - "#...\\n" is a comment up to the end of the line
220
222
221 Whitespace between the above elements is ignored.
223 Whitespace between the above elements is ignored.
222
224
223 A backref is either
225 A backref is either
224
226
225 - a number n, which references the node curr-n, where curr is the current
227 - a number n, which references the node curr-n, where curr is the current
226 node, or
228 node, or
227 - the name of a local tag you placed earlier using ":tag", or
229 - the name of a local tag you placed earlier using ":tag", or
228 - empty to denote the default parent.
230 - empty to denote the default parent.
229
231
230 All string valued-elements are either strictly alphanumeric, or must
232 All string valued-elements are either strictly alphanumeric, or must
231 be enclosed in double quotes ("..."), with "\\" as escape character.
233 be enclosed in double quotes ("..."), with "\\" as escape character.
232 """
234 """
233
235
234 if text is None:
236 if text is None:
235 ui.status(_(b"reading DAG from stdin\n"))
237 ui.status(_(b"reading DAG from stdin\n"))
236 text = ui.fin.read()
238 text = ui.fin.read()
237
239
238 cl = repo.changelog
240 cl = repo.changelog
239 if len(cl) > 0 and not from_existing:
241 if len(cl) > 0 and not from_existing:
240 raise error.Abort(_(b'repository is not empty'))
242 raise error.Abort(_(b'repository is not empty'))
241
243
242 # determine number of revs in DAG
244 # determine number of revs in DAG
243 total = 0
245 total = 0
244 for type, data in dagparser.parsedag(text):
246 for type, data in dagparser.parsedag(text):
245 if type == b'n':
247 if type == b'n':
246 total += 1
248 total += 1
247
249
248 if mergeable_file:
250 if mergeable_file:
249 linesperrev = 2
251 linesperrev = 2
250 # make a file with k lines per rev
252 # make a file with k lines per rev
251 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
253 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
252 initialmergedlines.append(b"")
254 initialmergedlines.append(b"")
253
255
254 tags = []
256 tags = []
255 progress = ui.makeprogress(
257 progress = ui.makeprogress(
256 _(b'building'), unit=_(b'revisions'), total=total
258 _(b'building'), unit=_(b'revisions'), total=total
257 )
259 )
258 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
260 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
259 at = -1
261 at = -1
260 atbranch = b'default'
262 atbranch = b'default'
261 nodeids = []
263 nodeids = []
262 id = 0
264 id = 0
263 progress.update(id)
265 progress.update(id)
264 for type, data in dagparser.parsedag(text):
266 for type, data in dagparser.parsedag(text):
265 if type == b'n':
267 if type == b'n':
266 ui.note((b'node %s\n' % pycompat.bytestr(data)))
268 ui.note((b'node %s\n' % pycompat.bytestr(data)))
267 id, ps = data
269 id, ps = data
268
270
269 files = []
271 files = []
270 filecontent = {}
272 filecontent = {}
271
273
272 p2 = None
274 p2 = None
273 if mergeable_file:
275 if mergeable_file:
274 fn = b"mf"
276 fn = b"mf"
275 p1 = repo[ps[0]]
277 p1 = repo[ps[0]]
276 if len(ps) > 1:
278 if len(ps) > 1:
277 p2 = repo[ps[1]]
279 p2 = repo[ps[1]]
278 pa = p1.ancestor(p2)
280 pa = p1.ancestor(p2)
279 base, local, other = [
281 base, local, other = [
280 x[fn].data() for x in (pa, p1, p2)
282 x[fn].data() for x in (pa, p1, p2)
281 ]
283 ]
282 m3 = simplemerge.Merge3Text(base, local, other)
284 m3 = simplemerge.Merge3Text(base, local, other)
283 ml = [
285 ml = [
284 l.strip()
286 l.strip()
285 for l in simplemerge.render_minimized(m3)[0]
287 for l in simplemerge.render_minimized(m3)[0]
286 ]
288 ]
287 ml.append(b"")
289 ml.append(b"")
288 elif at > 0:
290 elif at > 0:
289 ml = p1[fn].data().split(b"\n")
291 ml = p1[fn].data().split(b"\n")
290 else:
292 else:
291 ml = initialmergedlines
293 ml = initialmergedlines
292 ml[id * linesperrev] += b" r%i" % id
294 ml[id * linesperrev] += b" r%i" % id
293 mergedtext = b"\n".join(ml)
295 mergedtext = b"\n".join(ml)
294 files.append(fn)
296 files.append(fn)
295 filecontent[fn] = mergedtext
297 filecontent[fn] = mergedtext
296
298
297 if overwritten_file:
299 if overwritten_file:
298 fn = b"of"
300 fn = b"of"
299 files.append(fn)
301 files.append(fn)
300 filecontent[fn] = b"r%i\n" % id
302 filecontent[fn] = b"r%i\n" % id
301
303
302 if new_file:
304 if new_file:
303 fn = b"nf%i" % id
305 fn = b"nf%i" % id
304 files.append(fn)
306 files.append(fn)
305 filecontent[fn] = b"r%i\n" % id
307 filecontent[fn] = b"r%i\n" % id
306 if len(ps) > 1:
308 if len(ps) > 1:
307 if not p2:
309 if not p2:
308 p2 = repo[ps[1]]
310 p2 = repo[ps[1]]
309 for fn in p2:
311 for fn in p2:
310 if fn.startswith(b"nf"):
312 if fn.startswith(b"nf"):
311 files.append(fn)
313 files.append(fn)
312 filecontent[fn] = p2[fn].data()
314 filecontent[fn] = p2[fn].data()
313
315
314 def fctxfn(repo, cx, path):
316 def fctxfn(repo, cx, path):
315 if path in filecontent:
317 if path in filecontent:
316 return context.memfilectx(
318 return context.memfilectx(
317 repo, cx, path, filecontent[path]
319 repo, cx, path, filecontent[path]
318 )
320 )
319 return None
321 return None
320
322
321 if len(ps) == 0 or ps[0] < 0:
323 if len(ps) == 0 or ps[0] < 0:
322 pars = [None, None]
324 pars = [None, None]
323 elif len(ps) == 1:
325 elif len(ps) == 1:
324 pars = [nodeids[ps[0]], None]
326 pars = [nodeids[ps[0]], None]
325 else:
327 else:
326 pars = [nodeids[p] for p in ps]
328 pars = [nodeids[p] for p in ps]
327 cx = context.memctx(
329 cx = context.memctx(
328 repo,
330 repo,
329 pars,
331 pars,
330 b"r%i" % id,
332 b"r%i" % id,
331 files,
333 files,
332 fctxfn,
334 fctxfn,
333 date=(id, 0),
335 date=(id, 0),
334 user=b"debugbuilddag",
336 user=b"debugbuilddag",
335 extra={b'branch': atbranch},
337 extra={b'branch': atbranch},
336 )
338 )
337 nodeid = repo.commitctx(cx)
339 nodeid = repo.commitctx(cx)
338 nodeids.append(nodeid)
340 nodeids.append(nodeid)
339 at = id
341 at = id
340 elif type == b'l':
342 elif type == b'l':
341 id, name = data
343 id, name = data
342 ui.note((b'tag %s\n' % name))
344 ui.note((b'tag %s\n' % name))
343 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
345 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
344 elif type == b'a':
346 elif type == b'a':
345 ui.note((b'branch %s\n' % data))
347 ui.note((b'branch %s\n' % data))
346 atbranch = data
348 atbranch = data
347 progress.update(id)
349 progress.update(id)
348
350
349 if tags:
351 if tags:
350 repo.vfs.write(b"localtags", b"".join(tags))
352 repo.vfs.write(b"localtags", b"".join(tags))
351
353
352
354
353 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
355 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
354 indent_string = b' ' * indent
356 indent_string = b' ' * indent
355 if all:
357 if all:
356 ui.writenoi18n(
358 ui.writenoi18n(
357 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
359 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
358 % indent_string
360 % indent_string
359 )
361 )
360
362
361 def showchunks(named):
363 def showchunks(named):
362 ui.write(b"\n%s%s\n" % (indent_string, named))
364 ui.write(b"\n%s%s\n" % (indent_string, named))
363 for deltadata in gen.deltaiter():
365 for deltadata in gen.deltaiter():
364 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
366 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
365 ui.write(
367 ui.write(
366 b"%s%s %s %s %s %s %d\n"
368 b"%s%s %s %s %s %s %d\n"
367 % (
369 % (
368 indent_string,
370 indent_string,
369 hex(node),
371 hex(node),
370 hex(p1),
372 hex(p1),
371 hex(p2),
373 hex(p2),
372 hex(cs),
374 hex(cs),
373 hex(deltabase),
375 hex(deltabase),
374 len(delta),
376 len(delta),
375 )
377 )
376 )
378 )
377
379
378 gen.changelogheader()
380 gen.changelogheader()
379 showchunks(b"changelog")
381 showchunks(b"changelog")
380 gen.manifestheader()
382 gen.manifestheader()
381 showchunks(b"manifest")
383 showchunks(b"manifest")
382 for chunkdata in iter(gen.filelogheader, {}):
384 for chunkdata in iter(gen.filelogheader, {}):
383 fname = chunkdata[b'filename']
385 fname = chunkdata[b'filename']
384 showchunks(fname)
386 showchunks(fname)
385 else:
387 else:
386 if isinstance(gen, bundle2.unbundle20):
388 if isinstance(gen, bundle2.unbundle20):
387 raise error.Abort(_(b'use debugbundle2 for this file'))
389 raise error.Abort(_(b'use debugbundle2 for this file'))
388 gen.changelogheader()
390 gen.changelogheader()
389 for deltadata in gen.deltaiter():
391 for deltadata in gen.deltaiter():
390 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
392 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
391 ui.write(b"%s%s\n" % (indent_string, hex(node)))
393 ui.write(b"%s%s\n" % (indent_string, hex(node)))
392
394
393
395
394 def _debugobsmarkers(ui, part, indent=0, **opts):
396 def _debugobsmarkers(ui, part, indent=0, **opts):
395 """display version and markers contained in 'data'"""
397 """display version and markers contained in 'data'"""
396 opts = pycompat.byteskwargs(opts)
398 opts = pycompat.byteskwargs(opts)
397 data = part.read()
399 data = part.read()
398 indent_string = b' ' * indent
400 indent_string = b' ' * indent
399 try:
401 try:
400 version, markers = obsolete._readmarkers(data)
402 version, markers = obsolete._readmarkers(data)
401 except error.UnknownVersion as exc:
403 except error.UnknownVersion as exc:
402 msg = b"%sunsupported version: %s (%d bytes)\n"
404 msg = b"%sunsupported version: %s (%d bytes)\n"
403 msg %= indent_string, exc.version, len(data)
405 msg %= indent_string, exc.version, len(data)
404 ui.write(msg)
406 ui.write(msg)
405 else:
407 else:
406 msg = b"%sversion: %d (%d bytes)\n"
408 msg = b"%sversion: %d (%d bytes)\n"
407 msg %= indent_string, version, len(data)
409 msg %= indent_string, version, len(data)
408 ui.write(msg)
410 ui.write(msg)
409 fm = ui.formatter(b'debugobsolete', opts)
411 fm = ui.formatter(b'debugobsolete', opts)
410 for rawmarker in sorted(markers):
412 for rawmarker in sorted(markers):
411 m = obsutil.marker(None, rawmarker)
413 m = obsutil.marker(None, rawmarker)
412 fm.startitem()
414 fm.startitem()
413 fm.plain(indent_string)
415 fm.plain(indent_string)
414 cmdutil.showmarker(fm, m)
416 cmdutil.showmarker(fm, m)
415 fm.end()
417 fm.end()
416
418
417
419
418 def _debugphaseheads(ui, data, indent=0):
420 def _debugphaseheads(ui, data, indent=0):
419 """display version and markers contained in 'data'"""
421 """display version and markers contained in 'data'"""
420 indent_string = b' ' * indent
422 indent_string = b' ' * indent
421 headsbyphase = phases.binarydecode(data)
423 headsbyphase = phases.binarydecode(data)
422 for phase in phases.allphases:
424 for phase in phases.allphases:
423 for head in headsbyphase[phase]:
425 for head in headsbyphase[phase]:
424 ui.write(indent_string)
426 ui.write(indent_string)
425 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
427 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426
428
427
429
428 def _quasirepr(thing):
430 def _quasirepr(thing):
429 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
431 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
430 return b'{%s}' % (
432 return b'{%s}' % (
431 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
433 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
432 )
434 )
433 return pycompat.bytestr(repr(thing))
435 return pycompat.bytestr(repr(thing))
434
436
435
437
436 def _debugbundle2(ui, gen, all=None, **opts):
438 def _debugbundle2(ui, gen, all=None, **opts):
437 """lists the contents of a bundle2"""
439 """lists the contents of a bundle2"""
438 if not isinstance(gen, bundle2.unbundle20):
440 if not isinstance(gen, bundle2.unbundle20):
439 raise error.Abort(_(b'not a bundle2 file'))
441 raise error.Abort(_(b'not a bundle2 file'))
440 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
442 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
441 parttypes = opts.get('part_type', [])
443 parttypes = opts.get('part_type', [])
442 for part in gen.iterparts():
444 for part in gen.iterparts():
443 if parttypes and part.type not in parttypes:
445 if parttypes and part.type not in parttypes:
444 continue
446 continue
445 msg = b'%s -- %s (mandatory: %r)\n'
447 msg = b'%s -- %s (mandatory: %r)\n'
446 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
448 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
447 if part.type == b'changegroup':
449 if part.type == b'changegroup':
448 version = part.params.get(b'version', b'01')
450 version = part.params.get(b'version', b'01')
449 cg = changegroup.getunbundler(version, part, b'UN')
451 cg = changegroup.getunbundler(version, part, b'UN')
450 if not ui.quiet:
452 if not ui.quiet:
451 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
453 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
452 if part.type == b'obsmarkers':
454 if part.type == b'obsmarkers':
453 if not ui.quiet:
455 if not ui.quiet:
454 _debugobsmarkers(ui, part, indent=4, **opts)
456 _debugobsmarkers(ui, part, indent=4, **opts)
455 if part.type == b'phase-heads':
457 if part.type == b'phase-heads':
456 if not ui.quiet:
458 if not ui.quiet:
457 _debugphaseheads(ui, part, indent=4)
459 _debugphaseheads(ui, part, indent=4)
458
460
459
461
460 @command(
462 @command(
461 b'debugbundle',
463 b'debugbundle',
462 [
464 [
463 (b'a', b'all', None, _(b'show all details')),
465 (b'a', b'all', None, _(b'show all details')),
464 (b'', b'part-type', [], _(b'show only the named part type')),
466 (b'', b'part-type', [], _(b'show only the named part type')),
465 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
467 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
466 ],
468 ],
467 _(b'FILE'),
469 _(b'FILE'),
468 norepo=True,
470 norepo=True,
469 )
471 )
470 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
472 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
471 """lists the contents of a bundle"""
473 """lists the contents of a bundle"""
472 with hg.openpath(ui, bundlepath) as f:
474 with hg.openpath(ui, bundlepath) as f:
473 if spec:
475 if spec:
474 spec = exchange.getbundlespec(ui, f)
476 spec = exchange.getbundlespec(ui, f)
475 ui.write(b'%s\n' % spec)
477 ui.write(b'%s\n' % spec)
476 return
478 return
477
479
478 gen = exchange.readbundle(ui, f, bundlepath)
480 gen = exchange.readbundle(ui, f, bundlepath)
479 if isinstance(gen, bundle2.unbundle20):
481 if isinstance(gen, bundle2.unbundle20):
480 return _debugbundle2(ui, gen, all=all, **opts)
482 return _debugbundle2(ui, gen, all=all, **opts)
481 _debugchangegroup(ui, gen, all=all, **opts)
483 _debugchangegroup(ui, gen, all=all, **opts)
482
484
483
485
484 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
486 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
485 def debugcapabilities(ui, path, **opts):
487 def debugcapabilities(ui, path, **opts):
486 """lists the capabilities of a remote peer"""
488 """lists the capabilities of a remote peer"""
487 opts = pycompat.byteskwargs(opts)
489 opts = pycompat.byteskwargs(opts)
488 peer = hg.peer(ui, opts, path)
490 peer = hg.peer(ui, opts, path)
489 try:
491 try:
490 caps = peer.capabilities()
492 caps = peer.capabilities()
491 ui.writenoi18n(b'Main capabilities:\n')
493 ui.writenoi18n(b'Main capabilities:\n')
492 for c in sorted(caps):
494 for c in sorted(caps):
493 ui.write(b' %s\n' % c)
495 ui.write(b' %s\n' % c)
494 b2caps = bundle2.bundle2caps(peer)
496 b2caps = bundle2.bundle2caps(peer)
495 if b2caps:
497 if b2caps:
496 ui.writenoi18n(b'Bundle2 capabilities:\n')
498 ui.writenoi18n(b'Bundle2 capabilities:\n')
497 for key, values in sorted(b2caps.items()):
499 for key, values in sorted(b2caps.items()):
498 ui.write(b' %s\n' % key)
500 ui.write(b' %s\n' % key)
499 for v in values:
501 for v in values:
500 ui.write(b' %s\n' % v)
502 ui.write(b' %s\n' % v)
501 finally:
503 finally:
502 peer.close()
504 peer.close()
503
505
504
506
505 @command(
507 @command(
506 b'debugchangedfiles',
508 b'debugchangedfiles',
507 [
509 [
508 (
510 (
509 b'',
511 b'',
510 b'compute',
512 b'compute',
511 False,
513 False,
512 b"compute information instead of reading it from storage",
514 b"compute information instead of reading it from storage",
513 ),
515 ),
514 ],
516 ],
515 b'REV',
517 b'REV',
516 )
518 )
517 def debugchangedfiles(ui, repo, rev, **opts):
519 def debugchangedfiles(ui, repo, rev, **opts):
518 """list the stored files changes for a revision"""
520 """list the stored files changes for a revision"""
519 ctx = logcmdutil.revsingle(repo, rev, None)
521 ctx = logcmdutil.revsingle(repo, rev, None)
520 files = None
522 files = None
521
523
522 if opts['compute']:
524 if opts['compute']:
523 files = metadata.compute_all_files_changes(ctx)
525 files = metadata.compute_all_files_changes(ctx)
524 else:
526 else:
525 sd = repo.changelog.sidedata(ctx.rev())
527 sd = repo.changelog.sidedata(ctx.rev())
526 files_block = sd.get(sidedata.SD_FILES)
528 files_block = sd.get(sidedata.SD_FILES)
527 if files_block is not None:
529 if files_block is not None:
528 files = metadata.decode_files_sidedata(sd)
530 files = metadata.decode_files_sidedata(sd)
529 if files is not None:
531 if files is not None:
530 for f in sorted(files.touched):
532 for f in sorted(files.touched):
531 if f in files.added:
533 if f in files.added:
532 action = b"added"
534 action = b"added"
533 elif f in files.removed:
535 elif f in files.removed:
534 action = b"removed"
536 action = b"removed"
535 elif f in files.merged:
537 elif f in files.merged:
536 action = b"merged"
538 action = b"merged"
537 elif f in files.salvaged:
539 elif f in files.salvaged:
538 action = b"salvaged"
540 action = b"salvaged"
539 else:
541 else:
540 action = b"touched"
542 action = b"touched"
541
543
542 copy_parent = b""
544 copy_parent = b""
543 copy_source = b""
545 copy_source = b""
544 if f in files.copied_from_p1:
546 if f in files.copied_from_p1:
545 copy_parent = b"p1"
547 copy_parent = b"p1"
546 copy_source = files.copied_from_p1[f]
548 copy_source = files.copied_from_p1[f]
547 elif f in files.copied_from_p2:
549 elif f in files.copied_from_p2:
548 copy_parent = b"p2"
550 copy_parent = b"p2"
549 copy_source = files.copied_from_p2[f]
551 copy_source = files.copied_from_p2[f]
550
552
551 data = (action, copy_parent, f, copy_source)
553 data = (action, copy_parent, f, copy_source)
552 template = b"%-8s %2s: %s, %s;\n"
554 template = b"%-8s %2s: %s, %s;\n"
553 ui.write(template % data)
555 ui.write(template % data)
554
556
555
557
556 @command(b'debugcheckstate', [], b'')
558 @command(b'debugcheckstate', [], b'')
557 def debugcheckstate(ui, repo):
559 def debugcheckstate(ui, repo):
558 """validate the correctness of the current dirstate"""
560 """validate the correctness of the current dirstate"""
559 errors = verify.verifier(repo)._verify_dirstate()
561 errors = verify.verifier(repo)._verify_dirstate()
560 if errors:
562 if errors:
561 errstr = _(b"dirstate inconsistent with current parent's manifest")
563 errstr = _(b"dirstate inconsistent with current parent's manifest")
562 raise error.Abort(errstr)
564 raise error.Abort(errstr)
563
565
564
566
565 @command(
567 @command(
566 b'debugcolor',
568 b'debugcolor',
567 [(b'', b'style', None, _(b'show all configured styles'))],
569 [(b'', b'style', None, _(b'show all configured styles'))],
568 b'hg debugcolor',
570 b'hg debugcolor',
569 )
571 )
570 def debugcolor(ui, repo, **opts):
572 def debugcolor(ui, repo, **opts):
571 """show available color, effects or style"""
573 """show available color, effects or style"""
572 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
574 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
573 if opts.get('style'):
575 if opts.get('style'):
574 return _debugdisplaystyle(ui)
576 return _debugdisplaystyle(ui)
575 else:
577 else:
576 return _debugdisplaycolor(ui)
578 return _debugdisplaycolor(ui)
577
579
578
580
579 def _debugdisplaycolor(ui):
581 def _debugdisplaycolor(ui):
580 ui = ui.copy()
582 ui = ui.copy()
581 ui._styles.clear()
583 ui._styles.clear()
582 for effect in color._activeeffects(ui).keys():
584 for effect in color._activeeffects(ui).keys():
583 ui._styles[effect] = effect
585 ui._styles[effect] = effect
584 if ui._terminfoparams:
586 if ui._terminfoparams:
585 for k, v in ui.configitems(b'color'):
587 for k, v in ui.configitems(b'color'):
586 if k.startswith(b'color.'):
588 if k.startswith(b'color.'):
587 ui._styles[k] = k[6:]
589 ui._styles[k] = k[6:]
588 elif k.startswith(b'terminfo.'):
590 elif k.startswith(b'terminfo.'):
589 ui._styles[k] = k[9:]
591 ui._styles[k] = k[9:]
590 ui.write(_(b'available colors:\n'))
592 ui.write(_(b'available colors:\n'))
591 # sort label with a '_' after the other to group '_background' entry.
593 # sort label with a '_' after the other to group '_background' entry.
592 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
594 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
593 for colorname, label in items:
595 for colorname, label in items:
594 ui.write(b'%s\n' % colorname, label=label)
596 ui.write(b'%s\n' % colorname, label=label)
595
597
596
598
597 def _debugdisplaystyle(ui):
599 def _debugdisplaystyle(ui):
598 ui.write(_(b'available style:\n'))
600 ui.write(_(b'available style:\n'))
599 if not ui._styles:
601 if not ui._styles:
600 return
602 return
601 width = max(len(s) for s in ui._styles)
603 width = max(len(s) for s in ui._styles)
602 for label, effects in sorted(ui._styles.items()):
604 for label, effects in sorted(ui._styles.items()):
603 ui.write(b'%s' % label, label=label)
605 ui.write(b'%s' % label, label=label)
604 if effects:
606 if effects:
605 # 50
607 # 50
606 ui.write(b': ')
608 ui.write(b': ')
607 ui.write(b' ' * (max(0, width - len(label))))
609 ui.write(b' ' * (max(0, width - len(label))))
608 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
610 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
609 ui.write(b'\n')
611 ui.write(b'\n')
610
612
611
613
612 @command(b'debugcreatestreamclonebundle', [], b'FILE')
614 @command(b'debugcreatestreamclonebundle', [], b'FILE')
613 def debugcreatestreamclonebundle(ui, repo, fname):
615 def debugcreatestreamclonebundle(ui, repo, fname):
614 """create a stream clone bundle file
616 """create a stream clone bundle file
615
617
616 Stream bundles are special bundles that are essentially archives of
618 Stream bundles are special bundles that are essentially archives of
617 revlog files. They are commonly used for cloning very quickly.
619 revlog files. They are commonly used for cloning very quickly.
618 """
620 """
619 # TODO we may want to turn this into an abort when this functionality
621 # TODO we may want to turn this into an abort when this functionality
620 # is moved into `hg bundle`.
622 # is moved into `hg bundle`.
621 if phases.hassecret(repo):
623 if phases.hassecret(repo):
622 ui.warn(
624 ui.warn(
623 _(
625 _(
624 b'(warning: stream clone bundle will contain secret '
626 b'(warning: stream clone bundle will contain secret '
625 b'revisions)\n'
627 b'revisions)\n'
626 )
628 )
627 )
629 )
628
630
629 requirements, gen = streamclone.generatebundlev1(repo)
631 requirements, gen = streamclone.generatebundlev1(repo)
630 changegroup.writechunks(ui, gen, fname)
632 changegroup.writechunks(ui, gen, fname)
631
633
632 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
634 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
633
635
634
636
635 @command(
637 @command(
636 b'debugdag',
638 b'debugdag',
637 [
639 [
638 (b't', b'tags', None, _(b'use tags as labels')),
640 (b't', b'tags', None, _(b'use tags as labels')),
639 (b'b', b'branches', None, _(b'annotate with branch names')),
641 (b'b', b'branches', None, _(b'annotate with branch names')),
640 (b'', b'dots', None, _(b'use dots for runs')),
642 (b'', b'dots', None, _(b'use dots for runs')),
641 (b's', b'spaces', None, _(b'separate elements by spaces')),
643 (b's', b'spaces', None, _(b'separate elements by spaces')),
642 ],
644 ],
643 _(b'[OPTION]... [FILE [REV]...]'),
645 _(b'[OPTION]... [FILE [REV]...]'),
644 optionalrepo=True,
646 optionalrepo=True,
645 )
647 )
646 def debugdag(ui, repo, file_=None, *revs, **opts):
648 def debugdag(ui, repo, file_=None, *revs, **opts):
647 """format the changelog or an index DAG as a concise textual description
649 """format the changelog or an index DAG as a concise textual description
648
650
649 If you pass a revlog index, the revlog's DAG is emitted. If you list
651 If you pass a revlog index, the revlog's DAG is emitted. If you list
650 revision numbers, they get labeled in the output as rN.
652 revision numbers, they get labeled in the output as rN.
651
653
652 Otherwise, the changelog DAG of the current repo is emitted.
654 Otherwise, the changelog DAG of the current repo is emitted.
653 """
655 """
654 spaces = opts.get('spaces')
656 spaces = opts.get('spaces')
655 dots = opts.get('dots')
657 dots = opts.get('dots')
656 if file_:
658 if file_:
657 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
659 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
658 revs = {int(r) for r in revs}
660 revs = {int(r) for r in revs}
659
661
660 def events():
662 def events():
661 for r in rlog:
663 for r in rlog:
662 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
664 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
663 if r in revs:
665 if r in revs:
664 yield b'l', (r, b"r%i" % r)
666 yield b'l', (r, b"r%i" % r)
665
667
666 elif repo:
668 elif repo:
667 cl = repo.changelog
669 cl = repo.changelog
668 tags = opts.get('tags')
670 tags = opts.get('tags')
669 branches = opts.get('branches')
671 branches = opts.get('branches')
670 if tags:
672 if tags:
671 labels = {}
673 labels = {}
672 for l, n in repo.tags().items():
674 for l, n in repo.tags().items():
673 labels.setdefault(cl.rev(n), []).append(l)
675 labels.setdefault(cl.rev(n), []).append(l)
674
676
675 def events():
677 def events():
676 b = b"default"
678 b = b"default"
677 for r in cl:
679 for r in cl:
678 if branches:
680 if branches:
679 newb = cl.read(cl.node(r))[5][b'branch']
681 newb = cl.read(cl.node(r))[5][b'branch']
680 if newb != b:
682 if newb != b:
681 yield b'a', newb
683 yield b'a', newb
682 b = newb
684 b = newb
683 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
685 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
684 if tags:
686 if tags:
685 ls = labels.get(r)
687 ls = labels.get(r)
686 if ls:
688 if ls:
687 for l in ls:
689 for l in ls:
688 yield b'l', (r, l)
690 yield b'l', (r, l)
689
691
690 else:
692 else:
691 raise error.Abort(_(b'need repo for changelog dag'))
693 raise error.Abort(_(b'need repo for changelog dag'))
692
694
693 for line in dagparser.dagtextlines(
695 for line in dagparser.dagtextlines(
694 events(),
696 events(),
695 addspaces=spaces,
697 addspaces=spaces,
696 wraplabels=True,
698 wraplabels=True,
697 wrapannotations=True,
699 wrapannotations=True,
698 wrapnonlinear=dots,
700 wrapnonlinear=dots,
699 usedots=dots,
701 usedots=dots,
700 maxlinewidth=70,
702 maxlinewidth=70,
701 ):
703 ):
702 ui.write(line)
704 ui.write(line)
703 ui.write(b"\n")
705 ui.write(b"\n")
704
706
705
707
706 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
708 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
707 def debugdata(ui, repo, file_, rev=None, **opts):
709 def debugdata(ui, repo, file_, rev=None, **opts):
708 """dump the contents of a data file revision"""
710 """dump the contents of a data file revision"""
709 opts = pycompat.byteskwargs(opts)
711 opts = pycompat.byteskwargs(opts)
710 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
712 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
711 if rev is not None:
713 if rev is not None:
712 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
714 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
713 file_, rev = None, file_
715 file_, rev = None, file_
714 elif rev is None:
716 elif rev is None:
715 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
717 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
716 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
718 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
717 try:
719 try:
718 ui.write(r.rawdata(r.lookup(rev)))
720 ui.write(r.rawdata(r.lookup(rev)))
719 except KeyError:
721 except KeyError:
720 raise error.Abort(_(b'invalid revision identifier %s') % rev)
722 raise error.Abort(_(b'invalid revision identifier %s') % rev)
721
723
722
724
723 @command(
725 @command(
724 b'debugdate',
726 b'debugdate',
725 [(b'e', b'extended', None, _(b'try extended date formats'))],
727 [(b'e', b'extended', None, _(b'try extended date formats'))],
726 _(b'[-e] DATE [RANGE]'),
728 _(b'[-e] DATE [RANGE]'),
727 norepo=True,
729 norepo=True,
728 optionalrepo=True,
730 optionalrepo=True,
729 )
731 )
730 def debugdate(ui, date, range=None, **opts):
732 def debugdate(ui, date, range=None, **opts):
731 """parse and display a date"""
733 """parse and display a date"""
732 if opts["extended"]:
734 if opts["extended"]:
733 d = dateutil.parsedate(date, dateutil.extendeddateformats)
735 d = dateutil.parsedate(date, dateutil.extendeddateformats)
734 else:
736 else:
735 d = dateutil.parsedate(date)
737 d = dateutil.parsedate(date)
736 ui.writenoi18n(b"internal: %d %d\n" % d)
738 ui.writenoi18n(b"internal: %d %d\n" % d)
737 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
739 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
738 if range:
740 if range:
739 m = dateutil.matchdate(range)
741 m = dateutil.matchdate(range)
740 ui.writenoi18n(b"match: %s\n" % m(d[0]))
742 ui.writenoi18n(b"match: %s\n" % m(d[0]))
741
743
742
744
743 @command(
745 @command(
744 b'debugdeltachain',
746 b'debugdeltachain',
745 cmdutil.debugrevlogopts + cmdutil.formatteropts,
747 cmdutil.debugrevlogopts + cmdutil.formatteropts,
746 _(b'-c|-m|FILE'),
748 _(b'-c|-m|FILE'),
747 optionalrepo=True,
749 optionalrepo=True,
748 )
750 )
749 def debugdeltachain(ui, repo, file_=None, **opts):
751 def debugdeltachain(ui, repo, file_=None, **opts):
750 """dump information about delta chains in a revlog
752 """dump information about delta chains in a revlog
751
753
752 Output can be templatized. Available template keywords are:
754 Output can be templatized. Available template keywords are:
753
755
754 :``rev``: revision number
756 :``rev``: revision number
755 :``p1``: parent 1 revision number (for reference)
757 :``p1``: parent 1 revision number (for reference)
756 :``p2``: parent 2 revision number (for reference)
758 :``p2``: parent 2 revision number (for reference)
757 :``chainid``: delta chain identifier (numbered by unique base)
759 :``chainid``: delta chain identifier (numbered by unique base)
758 :``chainlen``: delta chain length to this revision
760 :``chainlen``: delta chain length to this revision
759 :``prevrev``: previous revision in delta chain
761 :``prevrev``: previous revision in delta chain
760 :``deltatype``: role of delta / how it was computed
762 :``deltatype``: role of delta / how it was computed
761 - base: a full snapshot
763 - base: a full snapshot
762 - snap: an intermediate snapshot
764 - snap: an intermediate snapshot
763 - p1: a delta against the first parent
765 - p1: a delta against the first parent
764 - p2: a delta against the second parent
766 - p2: a delta against the second parent
765 - skip1: a delta against the same base as p1
767 - skip1: a delta against the same base as p1
766 (when p1 has empty delta
768 (when p1 has empty delta
767 - skip2: a delta against the same base as p2
769 - skip2: a delta against the same base as p2
768 (when p2 has empty delta
770 (when p2 has empty delta
769 - prev: a delta against the previous revision
771 - prev: a delta against the previous revision
770 - other: a delta against an arbitrary revision
772 - other: a delta against an arbitrary revision
771 :``compsize``: compressed size of revision
773 :``compsize``: compressed size of revision
772 :``uncompsize``: uncompressed size of revision
774 :``uncompsize``: uncompressed size of revision
773 :``chainsize``: total size of compressed revisions in chain
775 :``chainsize``: total size of compressed revisions in chain
774 :``chainratio``: total chain size divided by uncompressed revision size
776 :``chainratio``: total chain size divided by uncompressed revision size
775 (new delta chains typically start at ratio 2.00)
777 (new delta chains typically start at ratio 2.00)
776 :``lindist``: linear distance from base revision in delta chain to end
778 :``lindist``: linear distance from base revision in delta chain to end
777 of this revision
779 of this revision
778 :``extradist``: total size of revisions not part of this delta chain from
780 :``extradist``: total size of revisions not part of this delta chain from
779 base of delta chain to end of this revision; a measurement
781 base of delta chain to end of this revision; a measurement
780 of how much extra data we need to read/seek across to read
782 of how much extra data we need to read/seek across to read
781 the delta chain for this revision
783 the delta chain for this revision
782 :``extraratio``: extradist divided by chainsize; another representation of
784 :``extraratio``: extradist divided by chainsize; another representation of
783 how much unrelated data is needed to load this delta chain
785 how much unrelated data is needed to load this delta chain
784
786
785 If the repository is configured to use the sparse read, additional keywords
787 If the repository is configured to use the sparse read, additional keywords
786 are available:
788 are available:
787
789
788 :``readsize``: total size of data read from the disk for a revision
790 :``readsize``: total size of data read from the disk for a revision
789 (sum of the sizes of all the blocks)
791 (sum of the sizes of all the blocks)
790 :``largestblock``: size of the largest block of data read from the disk
792 :``largestblock``: size of the largest block of data read from the disk
791 :``readdensity``: density of useful bytes in the data read from the disk
793 :``readdensity``: density of useful bytes in the data read from the disk
792 :``srchunks``: in how many data hunks the whole revision would be read
794 :``srchunks``: in how many data hunks the whole revision would be read
793
795
794 The sparse read can be enabled with experimental.sparse-read = True
796 The sparse read can be enabled with experimental.sparse-read = True
795 """
797 """
796 opts = pycompat.byteskwargs(opts)
798 opts = pycompat.byteskwargs(opts)
797 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
799 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
798 index = r.index
800 index = r.index
799 start = r.start
801 start = r.start
800 length = r.length
802 length = r.length
801 generaldelta = r._generaldelta
803 generaldelta = r._generaldelta
802 withsparseread = getattr(r, '_withsparseread', False)
804 withsparseread = getattr(r, '_withsparseread', False)
803
805
804 # security to avoid crash on corrupted revlogs
806 # security to avoid crash on corrupted revlogs
805 total_revs = len(index)
807 total_revs = len(index)
806
808
807 chain_size_cache = {}
809 chain_size_cache = {}
808
810
809 def revinfo(rev):
811 def revinfo(rev):
810 e = index[rev]
812 e = index[rev]
811 compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
813 compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
812 uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
814 uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
813
815
814 base = e[revlog_constants.ENTRY_DELTA_BASE]
816 base = e[revlog_constants.ENTRY_DELTA_BASE]
815 p1 = e[revlog_constants.ENTRY_PARENT_1]
817 p1 = e[revlog_constants.ENTRY_PARENT_1]
816 p2 = e[revlog_constants.ENTRY_PARENT_2]
818 p2 = e[revlog_constants.ENTRY_PARENT_2]
817
819
818 # If the parents of a revision has an empty delta, we never try to delta
820 # If the parents of a revision has an empty delta, we never try to delta
819 # against that parent, but directly against the delta base of that
821 # against that parent, but directly against the delta base of that
820 # parent (recursively). It avoids adding a useless entry in the chain.
822 # parent (recursively). It avoids adding a useless entry in the chain.
821 #
823 #
822 # However we need to detect that as a special case for delta-type, that
824 # However we need to detect that as a special case for delta-type, that
823 # is not simply "other".
825 # is not simply "other".
824 p1_base = p1
826 p1_base = p1
825 if p1 != nullrev and p1 < total_revs:
827 if p1 != nullrev and p1 < total_revs:
826 e1 = index[p1]
828 e1 = index[p1]
827 while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
829 while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
828 new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
830 new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
829 if (
831 if (
830 new_base == p1_base
832 new_base == p1_base
831 or new_base == nullrev
833 or new_base == nullrev
832 or new_base >= total_revs
834 or new_base >= total_revs
833 ):
835 ):
834 break
836 break
835 p1_base = new_base
837 p1_base = new_base
836 e1 = index[p1_base]
838 e1 = index[p1_base]
837 p2_base = p2
839 p2_base = p2
838 if p2 != nullrev and p2 < total_revs:
840 if p2 != nullrev and p2 < total_revs:
839 e2 = index[p2]
841 e2 = index[p2]
840 while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
842 while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
841 new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
843 new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
842 if (
844 if (
843 new_base == p2_base
845 new_base == p2_base
844 or new_base == nullrev
846 or new_base == nullrev
845 or new_base >= total_revs
847 or new_base >= total_revs
846 ):
848 ):
847 break
849 break
848 p2_base = new_base
850 p2_base = new_base
849 e2 = index[p2_base]
851 e2 = index[p2_base]
850
852
851 if generaldelta:
853 if generaldelta:
852 if base == p1:
854 if base == p1:
853 deltatype = b'p1'
855 deltatype = b'p1'
854 elif base == p2:
856 elif base == p2:
855 deltatype = b'p2'
857 deltatype = b'p2'
856 elif base == rev:
858 elif base == rev:
857 deltatype = b'base'
859 deltatype = b'base'
858 elif base == p1_base:
860 elif base == p1_base:
859 deltatype = b'skip1'
861 deltatype = b'skip1'
860 elif base == p2_base:
862 elif base == p2_base:
861 deltatype = b'skip2'
863 deltatype = b'skip2'
862 elif r.issnapshot(rev):
864 elif r.issnapshot(rev):
863 deltatype = b'snap'
865 deltatype = b'snap'
864 elif base == rev - 1:
866 elif base == rev - 1:
865 deltatype = b'prev'
867 deltatype = b'prev'
866 else:
868 else:
867 deltatype = b'other'
869 deltatype = b'other'
868 else:
870 else:
869 if base == rev:
871 if base == rev:
870 deltatype = b'base'
872 deltatype = b'base'
871 else:
873 else:
872 deltatype = b'prev'
874 deltatype = b'prev'
873
875
874 chain = r._deltachain(rev)[0]
876 chain = r._deltachain(rev)[0]
875 chain_size = 0
877 chain_size = 0
876 for iter_rev in reversed(chain):
878 for iter_rev in reversed(chain):
877 cached = chain_size_cache.get(iter_rev)
879 cached = chain_size_cache.get(iter_rev)
878 if cached is not None:
880 if cached is not None:
879 chain_size += cached
881 chain_size += cached
880 break
882 break
881 e = index[iter_rev]
883 e = index[iter_rev]
882 chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
884 chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
883 chain_size_cache[rev] = chain_size
885 chain_size_cache[rev] = chain_size
884
886
885 return p1, p2, compsize, uncompsize, deltatype, chain, chain_size
887 return p1, p2, compsize, uncompsize, deltatype, chain, chain_size
886
888
887 fm = ui.formatter(b'debugdeltachain', opts)
889 fm = ui.formatter(b'debugdeltachain', opts)
888
890
889 fm.plain(
891 fm.plain(
890 b' rev p1 p2 chain# chainlen prev delta '
892 b' rev p1 p2 chain# chainlen prev delta '
891 b'size rawsize chainsize ratio lindist extradist '
893 b'size rawsize chainsize ratio lindist extradist '
892 b'extraratio'
894 b'extraratio'
893 )
895 )
894 if withsparseread:
896 if withsparseread:
895 fm.plain(b' readsize largestblk rddensity srchunks')
897 fm.plain(b' readsize largestblk rddensity srchunks')
896 fm.plain(b'\n')
898 fm.plain(b'\n')
897
899
898 chainbases = {}
900 chainbases = {}
899 for rev in r:
901 for rev in r:
900 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
902 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
901 chainbase = chain[0]
903 chainbase = chain[0]
902 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
904 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
903 basestart = start(chainbase)
905 basestart = start(chainbase)
904 revstart = start(rev)
906 revstart = start(rev)
905 lineardist = revstart + comp - basestart
907 lineardist = revstart + comp - basestart
906 extradist = lineardist - chainsize
908 extradist = lineardist - chainsize
907 try:
909 try:
908 prevrev = chain[-2]
910 prevrev = chain[-2]
909 except IndexError:
911 except IndexError:
910 prevrev = -1
912 prevrev = -1
911
913
912 if uncomp != 0:
914 if uncomp != 0:
913 chainratio = float(chainsize) / float(uncomp)
915 chainratio = float(chainsize) / float(uncomp)
914 else:
916 else:
915 chainratio = chainsize
917 chainratio = chainsize
916
918
917 if chainsize != 0:
919 if chainsize != 0:
918 extraratio = float(extradist) / float(chainsize)
920 extraratio = float(extradist) / float(chainsize)
919 else:
921 else:
920 extraratio = extradist
922 extraratio = extradist
921
923
922 fm.startitem()
924 fm.startitem()
923 fm.write(
925 fm.write(
924 b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
926 b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
925 b'uncompsize chainsize chainratio lindist extradist '
927 b'uncompsize chainsize chainratio lindist extradist '
926 b'extraratio',
928 b'extraratio',
927 b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
929 b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
928 rev,
930 rev,
929 p1,
931 p1,
930 p2,
932 p2,
931 chainid,
933 chainid,
932 len(chain),
934 len(chain),
933 prevrev,
935 prevrev,
934 deltatype,
936 deltatype,
935 comp,
937 comp,
936 uncomp,
938 uncomp,
937 chainsize,
939 chainsize,
938 chainratio,
940 chainratio,
939 lineardist,
941 lineardist,
940 extradist,
942 extradist,
941 extraratio,
943 extraratio,
942 rev=rev,
944 rev=rev,
943 chainid=chainid,
945 chainid=chainid,
944 chainlen=len(chain),
946 chainlen=len(chain),
945 prevrev=prevrev,
947 prevrev=prevrev,
946 deltatype=deltatype,
948 deltatype=deltatype,
947 compsize=comp,
949 compsize=comp,
948 uncompsize=uncomp,
950 uncompsize=uncomp,
949 chainsize=chainsize,
951 chainsize=chainsize,
950 chainratio=chainratio,
952 chainratio=chainratio,
951 lindist=lineardist,
953 lindist=lineardist,
952 extradist=extradist,
954 extradist=extradist,
953 extraratio=extraratio,
955 extraratio=extraratio,
954 )
956 )
955 if withsparseread:
957 if withsparseread:
956 readsize = 0
958 readsize = 0
957 largestblock = 0
959 largestblock = 0
958 srchunks = 0
960 srchunks = 0
959
961
960 for revschunk in deltautil.slicechunk(r, chain):
962 for revschunk in deltautil.slicechunk(r, chain):
961 srchunks += 1
963 srchunks += 1
962 blkend = start(revschunk[-1]) + length(revschunk[-1])
964 blkend = start(revschunk[-1]) + length(revschunk[-1])
963 blksize = blkend - start(revschunk[0])
965 blksize = blkend - start(revschunk[0])
964
966
965 readsize += blksize
967 readsize += blksize
966 if largestblock < blksize:
968 if largestblock < blksize:
967 largestblock = blksize
969 largestblock = blksize
968
970
969 if readsize:
971 if readsize:
970 readdensity = float(chainsize) / float(readsize)
972 readdensity = float(chainsize) / float(readsize)
971 else:
973 else:
972 readdensity = 1
974 readdensity = 1
973
975
974 fm.write(
976 fm.write(
975 b'readsize largestblock readdensity srchunks',
977 b'readsize largestblock readdensity srchunks',
976 b' %10d %10d %9.5f %8d',
978 b' %10d %10d %9.5f %8d',
977 readsize,
979 readsize,
978 largestblock,
980 largestblock,
979 readdensity,
981 readdensity,
980 srchunks,
982 srchunks,
981 readsize=readsize,
983 readsize=readsize,
982 largestblock=largestblock,
984 largestblock=largestblock,
983 readdensity=readdensity,
985 readdensity=readdensity,
984 srchunks=srchunks,
986 srchunks=srchunks,
985 )
987 )
986
988
987 fm.plain(b'\n')
989 fm.plain(b'\n')
988
990
989 fm.end()
991 fm.end()
990
992
991
993
992 @command(
994 @command(
993 b'debug-delta-find',
995 b'debug-delta-find',
994 cmdutil.debugrevlogopts
996 cmdutil.debugrevlogopts
995 + cmdutil.formatteropts
997 + cmdutil.formatteropts
996 + [
998 + [
997 (
999 (
998 b'',
1000 b'',
999 b'source',
1001 b'source',
1000 b'full',
1002 b'full',
1001 _(b'input data feed to the process (full, storage, p1, p2, prev)'),
1003 _(b'input data feed to the process (full, storage, p1, p2, prev)'),
1002 ),
1004 ),
1003 ],
1005 ],
1004 _(b'-c|-m|FILE REV'),
1006 _(b'-c|-m|FILE REV'),
1005 optionalrepo=True,
1007 optionalrepo=True,
1006 )
1008 )
1007 def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
1009 def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
1008 """display the computation to get to a valid delta for storing REV
1010 """display the computation to get to a valid delta for storing REV
1009
1011
1010 This command will replay the process used to find the "best" delta to store
1012 This command will replay the process used to find the "best" delta to store
1011 a revision and display information about all the steps used to get to that
1013 a revision and display information about all the steps used to get to that
1012 result.
1014 result.
1013
1015
1014 By default, the process is fed with a the full-text for the revision. This
1016 By default, the process is fed with a the full-text for the revision. This
1015 can be controlled with the --source flag.
1017 can be controlled with the --source flag.
1016
1018
1017 The revision is the revision number of the target storage (not the changelog
1019 The revision is the revision number of the target storage (not the changelog
1018 revision number).
1020 revision number).
1019
1021
1020 note: the process is initiated from the full text of the revision to store.
1022 note: the process is initiated from the full text of the revision to store.
1021 """
1023 """
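# A hypothetical invocation (revision number made up for illustration):
#   hg debug-delta-find -m 1200 --source p1
# replays the delta search for manifest revision 1200, feeding it the delta
# against p1 instead of the full text.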
1022 opts = pycompat.byteskwargs(opts)
1024 opts = pycompat.byteskwargs(opts)
1023 if arg_2 is None:
1025 if arg_2 is None:
1024 file_ = None
1026 file_ = None
1025 rev = arg_1
1027 rev = arg_1
1026 else:
1028 else:
1027 file_ = arg_1
1029 file_ = arg_1
1028 rev = arg_2
1030 rev = arg_2
1029
1031
1030 rev = int(rev)
1032 rev = int(rev)
1031
1033
1032 revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
1034 revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
1033 p1r, p2r = revlog.parentrevs(rev)
1035 p1r, p2r = revlog.parentrevs(rev)
1034
1036
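# Map the requested --source to the base revision the input delta is taken
# against; nullrev means the process starts from the full text.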
1035 if source == b'full':
1037 if source == b'full':
1036 base_rev = nullrev
1038 base_rev = nullrev
1037 elif source == b'storage':
1039 elif source == b'storage':
1038 base_rev = revlog.deltaparent(rev)
1040 base_rev = revlog.deltaparent(rev)
1039 elif source == b'p1':
1041 elif source == b'p1':
1040 base_rev = p1r
1042 base_rev = p1r
1041 elif source == b'p2':
1043 elif source == b'p2':
1042 base_rev = p2r
1044 base_rev = p2r
1043 elif source == b'prev':
1045 elif source == b'prev':
1044 base_rev = rev - 1
1046 base_rev = rev - 1
1045 else:
1047 else:
1046 raise error.InputError(b"invalid --source value: %s" % source)
1048 raise error.InputError(b"invalid --source value: %s" % source)
1047
1049
1048 revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
1050 revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
1049
1051
1050
1052
1051 @command(
1053 @command(
1052 b'debugdirstate|debugstate',
1054 b'debugdirstate|debugstate',
1053 [
1055 [
1054 (
1056 (
1055 b'',
1057 b'',
1056 b'nodates',
1058 b'nodates',
1057 None,
1059 None,
1058 _(b'do not display the saved mtime (DEPRECATED)'),
1060 _(b'do not display the saved mtime (DEPRECATED)'),
1059 ),
1061 ),
1060 (b'', b'dates', True, _(b'display the saved mtime')),
1062 (b'', b'dates', True, _(b'display the saved mtime')),
1061 (b'', b'datesort', None, _(b'sort by saved mtime')),
1063 (b'', b'datesort', None, _(b'sort by saved mtime')),
1062 (
1064 (
1063 b'',
1065 b'',
1064 b'docket',
1066 b'docket',
1065 False,
1067 False,
1066 _(b'display the docket (metadata file) instead'),
1068 _(b'display the docket (metadata file) instead'),
1067 ),
1069 ),
1068 (
1070 (
1069 b'',
1071 b'',
1070 b'all',
1072 b'all',
1071 False,
1073 False,
1072 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
1074 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
1073 ),
1075 ),
1074 ],
1076 ],
1075 _(b'[OPTION]...'),
1077 _(b'[OPTION]...'),
1076 )
1078 )
1077 def debugstate(ui, repo, **opts):
1079 def debugstate(ui, repo, **opts):
1078 """show the contents of the current dirstate"""
1080 """show the contents of the current dirstate"""
1079
1081
1080 if opts.get("docket"):
1082 if opts.get("docket"):
1081 if not repo.dirstate._use_dirstate_v2:
1083 if not repo.dirstate._use_dirstate_v2:
1082 raise error.Abort(_(b'dirstate v1 does not have a docket'))
1084 raise error.Abort(_(b'dirstate v1 does not have a docket'))
1083
1085
1084 docket = repo.dirstate._map.docket
1086 docket = repo.dirstate._map.docket
1085 (
1087 (
1086 start_offset,
1088 start_offset,
1087 root_nodes,
1089 root_nodes,
1088 nodes_with_entry,
1090 nodes_with_entry,
1089 nodes_with_copy,
1091 nodes_with_copy,
1090 unused_bytes,
1092 unused_bytes,
1091 _unused,
1093 _unused,
1092 ignore_pattern,
1094 ignore_pattern,
1093 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
1095 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
1094
1096
1095 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
1097 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
1096 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
1098 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
1097 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
1099 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
1098 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
1100 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
1099 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
1101 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
1100 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
1102 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
1101 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
1103 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
1102 ui.write(
1104 ui.write(
1103 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
1105 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
1104 )
1106 )
1105 return
1107 return
1106
1108
1107 nodates = not opts['dates']
1109 nodates = not opts['dates']
1108 if opts.get('nodates') is not None:
1110 if opts.get('nodates') is not None:
1109 nodates = True
1111 nodates = True
1110 datesort = opts.get('datesort')
1112 datesort = opts.get('datesort')
1111
1113
1112 if datesort:
1114 if datesort:
1113
1115
1114 def keyfunc(entry):
1116 def keyfunc(entry):
1115 filename, _state, _mode, _size, mtime = entry
1117 filename, _state, _mode, _size, mtime = entry
1116 return (mtime, filename)
1118 return (mtime, filename)
1117
1119
1118 else:
1120 else:
1119 keyfunc = None # sort by filename
1121 keyfunc = None # sort by filename
1120 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
1122 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
1121 entries.sort(key=keyfunc)
1123 entries.sort(key=keyfunc)
1122 for entry in entries:
1124 for entry in entries:
1123 filename, state, mode, size, mtime = entry
1125 filename, state, mode, size, mtime = entry
1124 if mtime == -1:
1126 if mtime == -1:
1125 timestr = b'unset '
1127 timestr = b'unset '
1126 elif nodates:
1128 elif nodates:
1127 timestr = b'set '
1129 timestr = b'set '
1128 else:
1130 else:
1129 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
1131 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
1130 timestr = encoding.strtolocal(timestr)
1132 timestr = encoding.strtolocal(timestr)
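# S_IFREG is 0o100000 and S_IFLNK is 0o120000, so the 0o20000 bit alone is
# enough to tell symlinks apart from regular files in the recorded mode.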
1131 if mode & 0o20000:
1133 if mode & 0o20000:
1132 mode = b'lnk'
1134 mode = b'lnk'
1133 else:
1135 else:
1134 mode = b'%3o' % (mode & 0o777 & ~util.umask)
1136 mode = b'%3o' % (mode & 0o777 & ~util.umask)
1135 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
1137 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
1136 for f in repo.dirstate.copies():
1138 for f in repo.dirstate.copies():
1137 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1139 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1138
1140
1139
1141
1140 @command(
1142 @command(
1141 b'debugdirstateignorepatternshash',
1143 b'debugdirstateignorepatternshash',
1142 [],
1144 [],
1143 _(b''),
1145 _(b''),
1144 )
1146 )
1145 def debugdirstateignorepatternshash(ui, repo, **opts):
1147 def debugdirstateignorepatternshash(ui, repo, **opts):
1146 """show the hash of ignore patterns stored in dirstate if v2,
1148 """show the hash of ignore patterns stored in dirstate if v2,
1147 or nothing for dirstate-v2
1149 or nothing for dirstate-v2
1148 """
1150 """
1149 if repo.dirstate._use_dirstate_v2:
1151 if repo.dirstate._use_dirstate_v2:
1150 docket = repo.dirstate._map.docket
1152 docket = repo.dirstate._map.docket
1151 hash_len = 20 # 160 bits for SHA-1
1153 hash_len = 20 # 160 bits for SHA-1
1152 hash_bytes = docket.tree_metadata[-hash_len:]
1154 hash_bytes = docket.tree_metadata[-hash_len:]
1153 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1155 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1154
1156
1155
1157
1156 @command(
1158 @command(
1157 b'debugdiscovery',
1159 b'debugdiscovery',
1158 [
1160 [
1159 (b'', b'old', None, _(b'use old-style discovery')),
1161 (b'', b'old', None, _(b'use old-style discovery')),
1160 (
1162 (
1161 b'',
1163 b'',
1162 b'nonheads',
1164 b'nonheads',
1163 None,
1165 None,
1164 _(b'use old-style discovery with non-heads included'),
1166 _(b'use old-style discovery with non-heads included'),
1165 ),
1167 ),
1166 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1168 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1167 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
1169 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
1168 (
1170 (
1169 b'',
1171 b'',
1170 b'local-as-revs',
1172 b'local-as-revs',
1171 b"",
1173 b"",
1172 b'treat local as having these revisions only',
1174 b'treat local as having these revisions only',
1173 ),
1175 ),
1174 (
1176 (
1175 b'',
1177 b'',
1176 b'remote-as-revs',
1178 b'remote-as-revs',
1177 b"",
1179 b"",
1178 b'use local as remote, with only these revisions',
1180 b'use local as remote, with only these revisions',
1179 ),
1181 ),
1180 ]
1182 ]
1181 + cmdutil.remoteopts
1183 + cmdutil.remoteopts
1182 + cmdutil.formatteropts,
1184 + cmdutil.formatteropts,
1183 _(b'[--rev REV] [OTHER]'),
1185 _(b'[--rev REV] [OTHER]'),
1184 )
1186 )
1185 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1187 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1186 """runs the changeset discovery protocol in isolation
1188 """runs the changeset discovery protocol in isolation
1187
1189
1188 The local peer can be "replaced" by a subset of the local repository by
1190 The local peer can be "replaced" by a subset of the local repository by
1189 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1191 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1190 can be "replaced" by a subset of the local repository using the
1192 can be "replaced" by a subset of the local repository using the
1191 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1193 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1192 discovery situations.
1194 discovery situations.
1193
1195
1194 The following developer-oriented config options are relevant for people playing with this command:
1196 The following developer-oriented config options are relevant for people playing with this command:
1195
1197
1196 * devel.discovery.exchange-heads=True
1198 * devel.discovery.exchange-heads=True
1197
1199
1198 If False, the discovery will not start with
1200 If False, the discovery will not start with
1199 remote head fetching and local head querying.
1201 remote head fetching and local head querying.
1200
1202
1201 * devel.discovery.grow-sample=True
1203 * devel.discovery.grow-sample=True
1202
1204
1203 If False, the sample size used in set discovery will not be increased
1205 If False, the sample size used in set discovery will not be increased
1204 during the process.
1206 during the process.
1205
1207
1206 * devel.discovery.grow-sample.dynamic=True
1208 * devel.discovery.grow-sample.dynamic=True
1207
1209
1208 When discovery.grow-sample.dynamic is True (the default), the sample size is
1210 When discovery.grow-sample.dynamic is True (the default), the sample size is
1209 adapted to the shape of the undecided set: it is set to the max of
1211 adapted to the shape of the undecided set: it is set to the max of
1210 <target-size>, len(roots(undecided)) and len(heads(undecided)).
1212 <target-size>, len(roots(undecided)) and len(heads(undecided)).
1211
1213
1212 * devel.discovery.grow-sample.rate=1.05
1214 * devel.discovery.grow-sample.rate=1.05
1213
1215
1214 The rate at which the sample grows.
1216 The rate at which the sample grows.
1215
1217
1216 * devel.discovery.randomize=True
1218 * devel.discovery.randomize=True
1217
1219
1218 If False, random sampling during discovery is made deterministic. It is meant
1220 If False, random sampling during discovery is made deterministic. It is meant
1219 for integration tests.
1221 for integration tests.
1220
1222
1221 * devel.discovery.sample-size=200
1223 * devel.discovery.sample-size=200
1222
1224
1223 Control the initial size of the discovery sample
1225 Control the initial size of the discovery sample
1224
1226
1225 * devel.discovery.sample-size.initial=100
1227 * devel.discovery.sample-size.initial=100
1226
1228
1227 Control the size of the sample used for the initial round of discovery
1229 Control the size of the sample used for the initial round of discovery
1228 """
1230 """
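# A hypothetical example (revsets made up for illustration): simulate
# discovery between two overlapping subsets of the local repository, with no
# network access involved:
#   hg debugdiscovery --local-as-revs '0:100' --remote-as-revs '0:80'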
1229 opts = pycompat.byteskwargs(opts)
1231 opts = pycompat.byteskwargs(opts)
1230 unfi = repo.unfiltered()
1232 unfi = repo.unfiltered()
1231
1233
1232 # setup potential extra filtering
1234 # setup potential extra filtering
1233 local_revs = opts[b"local_as_revs"]
1235 local_revs = opts[b"local_as_revs"]
1234 remote_revs = opts[b"remote_as_revs"]
1236 remote_revs = opts[b"remote_as_revs"]
1235
1237
1236 # make sure tests are repeatable
1238 # make sure tests are repeatable
1237 random.seed(int(opts[b'seed']))
1239 random.seed(int(opts[b'seed']))
1238
1240
1239 if not remote_revs:
1241 if not remote_revs:
1240 path = urlutil.get_unique_pull_path_obj(
1242 path = urlutil.get_unique_pull_path_obj(
1241 b'debugdiscovery', ui, remoteurl
1243 b'debugdiscovery', ui, remoteurl
1242 )
1244 )
1243 branches = (path.branch, [])
1245 branches = (path.branch, [])
1244 remote = hg.peer(repo, opts, path)
1246 remote = hg.peer(repo, opts, path)
1245 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1247 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1246 else:
1248 else:
1247 branches = (None, [])
1249 branches = (None, [])
1248 remote_filtered_revs = logcmdutil.revrange(
1250 remote_filtered_revs = logcmdutil.revrange(
1249 unfi, [b"not (::(%s))" % remote_revs]
1251 unfi, [b"not (::(%s))" % remote_revs]
1250 )
1252 )
1251 remote_filtered_revs = frozenset(remote_filtered_revs)
1253 remote_filtered_revs = frozenset(remote_filtered_revs)
1252
1254
1253 def remote_func(x):
1255 def remote_func(x):
1254 return remote_filtered_revs
1256 return remote_filtered_revs
1255
1257
1256 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1258 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1257
1259
1258 remote = repo.peer()
1260 remote = repo.peer()
1259 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1261 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1260
1262
1261 if local_revs:
1263 if local_revs:
1262 local_filtered_revs = logcmdutil.revrange(
1264 local_filtered_revs = logcmdutil.revrange(
1263 unfi, [b"not (::(%s))" % local_revs]
1265 unfi, [b"not (::(%s))" % local_revs]
1264 )
1266 )
1265 local_filtered_revs = frozenset(local_filtered_revs)
1267 local_filtered_revs = frozenset(local_filtered_revs)
1266
1268
1267 def local_func(x):
1269 def local_func(x):
1268 return local_filtered_revs
1270 return local_filtered_revs
1269
1271
1270 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1272 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1271 repo = repo.filtered(b'debug-discovery-local-filter')
1273 repo = repo.filtered(b'debug-discovery-local-filter')
1272
1274
1273 data = {}
1275 data = {}
1274 if opts.get(b'old'):
1276 if opts.get(b'old'):
1275
1277
1276 def doit(pushedrevs, remoteheads, remote=remote):
1278 def doit(pushedrevs, remoteheads, remote=remote):
1277 if not util.safehasattr(remote, b'branches'):
1279 if not util.safehasattr(remote, b'branches'):
1278 # enable in-client legacy support
1280 # enable in-client legacy support
1279 remote = localrepo.locallegacypeer(remote.local())
1281 remote = localrepo.locallegacypeer(remote.local())
1280 if remote_revs:
1282 if remote_revs:
1281 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1283 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1282 remote._repo = r
1284 remote._repo = r
1283 common, _in, hds = treediscovery.findcommonincoming(
1285 common, _in, hds = treediscovery.findcommonincoming(
1284 repo, remote, force=True, audit=data
1286 repo, remote, force=True, audit=data
1285 )
1287 )
1286 common = set(common)
1288 common = set(common)
1287 if not opts.get(b'nonheads'):
1289 if not opts.get(b'nonheads'):
1288 ui.writenoi18n(
1290 ui.writenoi18n(
1289 b"unpruned common: %s\n"
1291 b"unpruned common: %s\n"
1290 % b" ".join(sorted(short(n) for n in common))
1292 % b" ".join(sorted(short(n) for n in common))
1291 )
1293 )
1292
1294
1293 clnode = repo.changelog.node
1295 clnode = repo.changelog.node
1294 common = repo.revs(b'heads(::%ln)', common)
1296 common = repo.revs(b'heads(::%ln)', common)
1295 common = {clnode(r) for r in common}
1297 common = {clnode(r) for r in common}
1296 return common, hds
1298 return common, hds
1297
1299
1298 else:
1300 else:
1299
1301
1300 def doit(pushedrevs, remoteheads, remote=remote):
1302 def doit(pushedrevs, remoteheads, remote=remote):
1301 nodes = None
1303 nodes = None
1302 if pushedrevs:
1304 if pushedrevs:
1303 revs = logcmdutil.revrange(repo, pushedrevs)
1305 revs = logcmdutil.revrange(repo, pushedrevs)
1304 nodes = [repo[r].node() for r in revs]
1306 nodes = [repo[r].node() for r in revs]
1305 common, any, hds = setdiscovery.findcommonheads(
1307 common, any, hds = setdiscovery.findcommonheads(
1306 ui,
1308 ui,
1307 repo,
1309 repo,
1308 remote,
1310 remote,
1309 ancestorsof=nodes,
1311 ancestorsof=nodes,
1310 audit=data,
1312 audit=data,
1311 abortwhenunrelated=False,
1313 abortwhenunrelated=False,
1312 )
1314 )
1313 return common, hds
1315 return common, hds
1314
1316
1315 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1317 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1316 localrevs = opts[b'rev']
1318 localrevs = opts[b'rev']
1317
1319
1318 fm = ui.formatter(b'debugdiscovery', opts)
1320 fm = ui.formatter(b'debugdiscovery', opts)
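# With a strict (structured) formatter such as JSON output, stray plain
# writes would corrupt the machine-readable result, so they are captured
# into data[b'output'] instead.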
1319 if fm.strict_format:
1321 if fm.strict_format:
1320
1322
1321 @contextlib.contextmanager
1323 @contextlib.contextmanager
1322 def may_capture_output():
1324 def may_capture_output():
1323 ui.pushbuffer()
1325 ui.pushbuffer()
1324 yield
1326 yield
1325 data[b'output'] = ui.popbuffer()
1327 data[b'output'] = ui.popbuffer()
1326
1328
1327 else:
1329 else:
1328 may_capture_output = util.nullcontextmanager
1330 may_capture_output = util.nullcontextmanager
1329 with may_capture_output():
1331 with may_capture_output():
1330 with util.timedcm('debug-discovery') as t:
1332 with util.timedcm('debug-discovery') as t:
1331 common, hds = doit(localrevs, remoterevs)
1333 common, hds = doit(localrevs, remoterevs)
1332
1334
1333 # compute all statistics
1335 # compute all statistics
1334 if len(common) == 1 and repo.nullid in common:
1336 if len(common) == 1 and repo.nullid in common:
1335 common = set()
1337 common = set()
1336 heads_common = set(common)
1338 heads_common = set(common)
1337 heads_remote = set(hds)
1339 heads_remote = set(hds)
1338 heads_local = set(repo.heads())
1340 heads_local = set(repo.heads())
1339 # note: there cannot be a local or remote head that is in common and not
1341 # note: there cannot be a local or remote head that is in common and not
1340 # itself a head of common.
1342 # itself a head of common.
1341 heads_common_local = heads_common & heads_local
1343 heads_common_local = heads_common & heads_local
1342 heads_common_remote = heads_common & heads_remote
1344 heads_common_remote = heads_common & heads_remote
1343 heads_common_both = heads_common & heads_remote & heads_local
1345 heads_common_both = heads_common & heads_remote & heads_local
1344
1346
1345 all = repo.revs(b'all()')
1347 all = repo.revs(b'all()')
1346 common = repo.revs(b'::%ln', common)
1348 common = repo.revs(b'::%ln', common)
1347 roots_common = repo.revs(b'roots(::%ld)', common)
1349 roots_common = repo.revs(b'roots(::%ld)', common)
1348 missing = repo.revs(b'not ::%ld', common)
1350 missing = repo.revs(b'not ::%ld', common)
1349 heads_missing = repo.revs(b'heads(%ld)', missing)
1351 heads_missing = repo.revs(b'heads(%ld)', missing)
1350 roots_missing = repo.revs(b'roots(%ld)', missing)
1352 roots_missing = repo.revs(b'roots(%ld)', missing)
1351 assert len(common) + len(missing) == len(all)
1353 assert len(common) + len(missing) == len(all)
1352
1354
1353 initial_undecided = repo.revs(
1355 initial_undecided = repo.revs(
1354 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1356 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1355 )
1357 )
1356 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1358 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1357 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1359 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1358 common_initial_undecided = initial_undecided & common
1360 common_initial_undecided = initial_undecided & common
1359 missing_initial_undecided = initial_undecided & missing
1361 missing_initial_undecided = initial_undecided & missing
1360
1362
1361 data[b'elapsed'] = t.elapsed
1363 data[b'elapsed'] = t.elapsed
1362 data[b'nb-common-heads'] = len(heads_common)
1364 data[b'nb-common-heads'] = len(heads_common)
1363 data[b'nb-common-heads-local'] = len(heads_common_local)
1365 data[b'nb-common-heads-local'] = len(heads_common_local)
1364 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1366 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1365 data[b'nb-common-heads-both'] = len(heads_common_both)
1367 data[b'nb-common-heads-both'] = len(heads_common_both)
1366 data[b'nb-common-roots'] = len(roots_common)
1368 data[b'nb-common-roots'] = len(roots_common)
1367 data[b'nb-head-local'] = len(heads_local)
1369 data[b'nb-head-local'] = len(heads_local)
1368 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1370 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1369 data[b'nb-head-remote'] = len(heads_remote)
1371 data[b'nb-head-remote'] = len(heads_remote)
1370 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1372 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1371 heads_common_remote
1373 heads_common_remote
1372 )
1374 )
1373 data[b'nb-revs'] = len(all)
1375 data[b'nb-revs'] = len(all)
1374 data[b'nb-revs-common'] = len(common)
1376 data[b'nb-revs-common'] = len(common)
1375 data[b'nb-revs-missing'] = len(missing)
1377 data[b'nb-revs-missing'] = len(missing)
1376 data[b'nb-missing-heads'] = len(heads_missing)
1378 data[b'nb-missing-heads'] = len(heads_missing)
1377 data[b'nb-missing-roots'] = len(roots_missing)
1379 data[b'nb-missing-roots'] = len(roots_missing)
1378 data[b'nb-ini_und'] = len(initial_undecided)
1380 data[b'nb-ini_und'] = len(initial_undecided)
1379 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1381 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1380 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1382 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1381 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1383 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1382 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1384 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1383
1385
1384 fm.startitem()
1386 fm.startitem()
1385 fm.data(**pycompat.strkwargs(data))
1387 fm.data(**pycompat.strkwargs(data))
1386 # display discovery summary
1388 # display discovery summary
1387 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1389 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1388 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1390 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1389 if b'total-round-trips-heads' in data:
1391 if b'total-round-trips-heads' in data:
1390 fm.plain(
1392 fm.plain(
1391 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1393 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1392 )
1394 )
1393 if b'total-round-trips-branches' in data:
1395 if b'total-round-trips-branches' in data:
1394 fm.plain(
1396 fm.plain(
1395 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1397 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1396 % data
1398 % data
1397 )
1399 )
1398 if b'total-round-trips-between' in data:
1400 if b'total-round-trips-between' in data:
1399 fm.plain(
1401 fm.plain(
1400 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1402 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1401 )
1403 )
1402 fm.plain(b"queries: %(total-queries)9d\n" % data)
1404 fm.plain(b"queries: %(total-queries)9d\n" % data)
1403 if b'total-queries-branches' in data:
1405 if b'total-queries-branches' in data:
1404 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1406 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1405 if b'total-queries-between' in data:
1407 if b'total-queries-between' in data:
1406 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1408 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1407 fm.plain(b"heads summary:\n")
1409 fm.plain(b"heads summary:\n")
1408 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1410 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1409 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1411 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1410 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1412 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1411 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1413 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1412 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1414 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1413 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1415 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1414 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1416 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1415 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1417 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1416 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1418 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1417 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1419 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1418 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1420 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1419 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1421 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1420 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1422 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1421 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1423 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1422 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1424 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1423 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1425 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1424 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1426 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1425 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1427 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1426 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1428 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1427 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1429 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1428 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1430 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1429 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1431 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1430
1432
1431 if ui.verbose:
1433 if ui.verbose:
1432 fm.plain(
1434 fm.plain(
1433 b"common heads: %s\n"
1435 b"common heads: %s\n"
1434 % b" ".join(sorted(short(n) for n in heads_common))
1436 % b" ".join(sorted(short(n) for n in heads_common))
1435 )
1437 )
1436 fm.end()
1438 fm.end()
1437
1439
1438
1440
1439 _chunksize = 4 << 10
1441 _chunksize = 4 << 10
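# 4 KiB read/write buffer used by debugdownload below.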
1440
1442
1441
1443
1442 @command(
1444 @command(
1443 b'debugdownload',
1445 b'debugdownload',
1444 [
1446 [
1445 (b'o', b'output', b'', _(b'path')),
1447 (b'o', b'output', b'', _(b'path')),
1446 ],
1448 ],
1447 optionalrepo=True,
1449 optionalrepo=True,
1448 )
1450 )
1449 def debugdownload(ui, repo, url, output=None, **opts):
1451 def debugdownload(ui, repo, url, output=None, **opts):
1450 """download a resource using Mercurial logic and config"""
1452 """download a resource using Mercurial logic and config"""
1451 fh = urlmod.open(ui, url, output)
1453 fh = urlmod.open(ui, url, output)
1452
1454
1453 dest = ui
1455 dest = ui
1454 if output:
1456 if output:
1455 dest = open(output, b"wb", _chunksize)
1457 dest = open(output, b"wb", _chunksize)
1456 try:
1458 try:
1457 data = fh.read(_chunksize)
1459 data = fh.read(_chunksize)
1458 while data:
1460 while data:
1459 dest.write(data)
1461 dest.write(data)
1460 data = fh.read(_chunksize)
1462 data = fh.read(_chunksize)
1461 finally:
1463 finally:
1462 if output:
1464 if output:
1463 dest.close()
1465 dest.close()
1464
1466
1465
1467
1466 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1468 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1467 def debugextensions(ui, repo, **opts):
1469 def debugextensions(ui, repo, **opts):
1468 '''show information about active extensions'''
1470 '''show information about active extensions'''
1469 opts = pycompat.byteskwargs(opts)
1471 opts = pycompat.byteskwargs(opts)
1470 exts = extensions.extensions(ui)
1472 exts = extensions.extensions(ui)
1471 hgver = util.version()
1473 hgver = util.version()
1472 fm = ui.formatter(b'debugextensions', opts)
1474 fm = ui.formatter(b'debugextensions', opts)
1473 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1475 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1474 isinternal = extensions.ismoduleinternal(extmod)
1476 isinternal = extensions.ismoduleinternal(extmod)
1475 extsource = None
1477 extsource = None
1476
1478
1477 if util.safehasattr(extmod, '__file__'):
1479 if util.safehasattr(extmod, '__file__'):
1478 extsource = pycompat.fsencode(extmod.__file__)
1480 extsource = pycompat.fsencode(extmod.__file__)
1479 elif getattr(sys, 'oxidized', False):
1481 elif getattr(sys, 'oxidized', False):
1480 extsource = pycompat.sysexecutable
1482 extsource = pycompat.sysexecutable
1481 if isinternal:
1483 if isinternal:
1482 exttestedwith = [] # never expose magic string to users
1484 exttestedwith = [] # never expose magic string to users
1483 else:
1485 else:
1484 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1486 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1485 extbuglink = getattr(extmod, 'buglink', None)
1487 extbuglink = getattr(extmod, 'buglink', None)
1486
1488
1487 fm.startitem()
1489 fm.startitem()
1488
1490
1489 if ui.quiet or ui.verbose:
1491 if ui.quiet or ui.verbose:
1490 fm.write(b'name', b'%s\n', extname)
1492 fm.write(b'name', b'%s\n', extname)
1491 else:
1493 else:
1492 fm.write(b'name', b'%s', extname)
1494 fm.write(b'name', b'%s', extname)
1493 if isinternal or hgver in exttestedwith:
1495 if isinternal or hgver in exttestedwith:
1494 fm.plain(b'\n')
1496 fm.plain(b'\n')
1495 elif not exttestedwith:
1497 elif not exttestedwith:
1496 fm.plain(_(b' (untested!)\n'))
1498 fm.plain(_(b' (untested!)\n'))
1497 else:
1499 else:
1498 lasttestedversion = exttestedwith[-1]
1500 lasttestedversion = exttestedwith[-1]
1499 fm.plain(b' (%s!)\n' % lasttestedversion)
1501 fm.plain(b' (%s!)\n' % lasttestedversion)
1500
1502
1501 fm.condwrite(
1503 fm.condwrite(
1502 ui.verbose and extsource,
1504 ui.verbose and extsource,
1503 b'source',
1505 b'source',
1504 _(b' location: %s\n'),
1506 _(b' location: %s\n'),
1505 extsource or b"",
1507 extsource or b"",
1506 )
1508 )
1507
1509
1508 if ui.verbose:
1510 if ui.verbose:
1509 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1511 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1510 fm.data(bundled=isinternal)
1512 fm.data(bundled=isinternal)
1511
1513
1512 fm.condwrite(
1514 fm.condwrite(
1513 ui.verbose and exttestedwith,
1515 ui.verbose and exttestedwith,
1514 b'testedwith',
1516 b'testedwith',
1515 _(b' tested with: %s\n'),
1517 _(b' tested with: %s\n'),
1516 fm.formatlist(exttestedwith, name=b'ver'),
1518 fm.formatlist(exttestedwith, name=b'ver'),
1517 )
1519 )
1518
1520
1519 fm.condwrite(
1521 fm.condwrite(
1520 ui.verbose and extbuglink,
1522 ui.verbose and extbuglink,
1521 b'buglink',
1523 b'buglink',
1522 _(b' bug reporting: %s\n'),
1524 _(b' bug reporting: %s\n'),
1523 extbuglink or b"",
1525 extbuglink or b"",
1524 )
1526 )
1525
1527
1526 fm.end()
1528 fm.end()
1527
1529
1528
1530
1529 @command(
1531 @command(
1530 b'debugfileset',
1532 b'debugfileset',
1531 [
1533 [
1532 (
1534 (
1533 b'r',
1535 b'r',
1534 b'rev',
1536 b'rev',
1535 b'',
1537 b'',
1536 _(b'apply the filespec on this revision'),
1538 _(b'apply the filespec on this revision'),
1537 _(b'REV'),
1539 _(b'REV'),
1538 ),
1540 ),
1539 (
1541 (
1540 b'',
1542 b'',
1541 b'all-files',
1543 b'all-files',
1542 False,
1544 False,
1543 _(b'test files from all revisions and working directory'),
1545 _(b'test files from all revisions and working directory'),
1544 ),
1546 ),
1545 (
1547 (
1546 b's',
1548 b's',
1547 b'show-matcher',
1549 b'show-matcher',
1548 None,
1550 None,
1549 _(b'print internal representation of matcher'),
1551 _(b'print internal representation of matcher'),
1550 ),
1552 ),
1551 (
1553 (
1552 b'p',
1554 b'p',
1553 b'show-stage',
1555 b'show-stage',
1554 [],
1556 [],
1555 _(b'print parsed tree at the given stage'),
1557 _(b'print parsed tree at the given stage'),
1556 _(b'NAME'),
1558 _(b'NAME'),
1557 ),
1559 ),
1558 ],
1560 ],
1559 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1561 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1560 )
1562 )
1561 def debugfileset(ui, repo, expr, **opts):
1563 def debugfileset(ui, repo, expr, **opts):
1562 '''parse and apply a fileset specification'''
1564 '''parse and apply a fileset specification'''
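# A hypothetical example: hg debugfileset -p all 'added() or modified()'
# prints the parsed, analyzed and optimized trees before listing the matches.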
1563 from . import fileset
1565 from . import fileset
1564
1566
1565 fileset.symbols # force import of fileset so we have predicates to optimize
1567 fileset.symbols # force import of fileset so we have predicates to optimize
1566 opts = pycompat.byteskwargs(opts)
1568 opts = pycompat.byteskwargs(opts)
1567 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1569 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1568
1570
1569 stages = [
1571 stages = [
1570 (b'parsed', pycompat.identity),
1572 (b'parsed', pycompat.identity),
1571 (b'analyzed', filesetlang.analyze),
1573 (b'analyzed', filesetlang.analyze),
1572 (b'optimized', filesetlang.optimize),
1574 (b'optimized', filesetlang.optimize),
1573 ]
1575 ]
1574 stagenames = {n for n, f in stages}
1576 stagenames = {n for n, f in stages}
1575
1577
1576 showalways = set()
1578 showalways = set()
1577 if ui.verbose and not opts[b'show_stage']:
1579 if ui.verbose and not opts[b'show_stage']:
1578 # show parsed tree by --verbose (deprecated)
1580 # show parsed tree by --verbose (deprecated)
1579 showalways.add(b'parsed')
1581 showalways.add(b'parsed')
1580 if opts[b'show_stage'] == [b'all']:
1582 if opts[b'show_stage'] == [b'all']:
1581 showalways.update(stagenames)
1583 showalways.update(stagenames)
1582 else:
1584 else:
1583 for n in opts[b'show_stage']:
1585 for n in opts[b'show_stage']:
1584 if n not in stagenames:
1586 if n not in stagenames:
1585 raise error.Abort(_(b'invalid stage name: %s') % n)
1587 raise error.Abort(_(b'invalid stage name: %s') % n)
1586 showalways.update(opts[b'show_stage'])
1588 showalways.update(opts[b'show_stage'])
1587
1589
1588 tree = filesetlang.parse(expr)
1590 tree = filesetlang.parse(expr)
1589 for n, f in stages:
1591 for n, f in stages:
1590 tree = f(tree)
1592 tree = f(tree)
1591 if n in showalways:
1593 if n in showalways:
1592 if opts[b'show_stage'] or n != b'parsed':
1594 if opts[b'show_stage'] or n != b'parsed':
1593 ui.write(b"* %s:\n" % n)
1595 ui.write(b"* %s:\n" % n)
1594 ui.write(filesetlang.prettyformat(tree), b"\n")
1596 ui.write(filesetlang.prettyformat(tree), b"\n")
1595
1597
1596 files = set()
1598 files = set()
1597 if opts[b'all_files']:
1599 if opts[b'all_files']:
1598 for r in repo:
1600 for r in repo:
1599 c = repo[r]
1601 c = repo[r]
1600 files.update(c.files())
1602 files.update(c.files())
1601 files.update(c.substate)
1603 files.update(c.substate)
1602 if opts[b'all_files'] or ctx.rev() is None:
1604 if opts[b'all_files'] or ctx.rev() is None:
1603 wctx = repo[None]
1605 wctx = repo[None]
1604 files.update(
1606 files.update(
1605 repo.dirstate.walk(
1607 repo.dirstate.walk(
1606 scmutil.matchall(repo),
1608 scmutil.matchall(repo),
1607 subrepos=list(wctx.substate),
1609 subrepos=list(wctx.substate),
1608 unknown=True,
1610 unknown=True,
1609 ignored=True,
1611 ignored=True,
1610 )
1612 )
1611 )
1613 )
1612 files.update(wctx.substate)
1614 files.update(wctx.substate)
1613 else:
1615 else:
1614 files.update(ctx.files())
1616 files.update(ctx.files())
1615 files.update(ctx.substate)
1617 files.update(ctx.substate)
1616
1618
1617 m = ctx.matchfileset(repo.getcwd(), expr)
1619 m = ctx.matchfileset(repo.getcwd(), expr)
1618 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1620 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1619 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1621 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1620 for f in sorted(files):
1622 for f in sorted(files):
1621 if not m(f):
1623 if not m(f):
1622 continue
1624 continue
1623 ui.write(b"%s\n" % f)
1625 ui.write(b"%s\n" % f)
1624
1626
1625
1627
1626 @command(
1628 @command(
1627 b"debug-repair-issue6528",
1629 b"debug-repair-issue6528",
1628 [
1630 [
1629 (
1631 (
1630 b'',
1632 b'',
1631 b'to-report',
1633 b'to-report',
1632 b'',
1634 b'',
1633 _(b'build a report of affected revisions to this file'),
1635 _(b'build a report of affected revisions to this file'),
1634 _(b'FILE'),
1636 _(b'FILE'),
1635 ),
1637 ),
1636 (
1638 (
1637 b'',
1639 b'',
1638 b'from-report',
1640 b'from-report',
1639 b'',
1641 b'',
1640 _(b'repair revisions listed in this report file'),
1642 _(b'repair revisions listed in this report file'),
1641 _(b'FILE'),
1643 _(b'FILE'),
1642 ),
1644 ),
1643 (
1645 (
1644 b'',
1646 b'',
1645 b'paranoid',
1647 b'paranoid',
1646 False,
1648 False,
1647 _(b'check that both detection methods do the same thing'),
1649 _(b'check that both detection methods do the same thing'),
1648 ),
1650 ),
1649 ]
1651 ]
1650 + cmdutil.dryrunopts,
1652 + cmdutil.dryrunopts,
1651 )
1653 )
1652 def debug_repair_issue6528(ui, repo, **opts):
1654 def debug_repair_issue6528(ui, repo, **opts):
1653 """find affected revisions and repair them. See issue6528 for more details.
1655 """find affected revisions and repair them. See issue6528 for more details.
1654
1656
1655 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1657 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1656 computation of affected revisions for a given repository across clones.
1658 computation of affected revisions for a given repository across clones.
1657 The report format is line-based (with empty lines ignored):
1659 The report format is line-based (with empty lines ignored):
1658
1660
1659 ```
1661 ```
1660 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1662 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1661 ```
1663 ```
1662
1664
1663 There can be multiple broken revisions per filelog, they are separated by
1665 There can be multiple broken revisions per filelog, they are separated by
1664 a comma with no spaces. The only space is between the revision(s) and the
1666 a comma with no spaces. The only space is between the revision(s) and the
1665 filename.
1667 filename.
1666
1668
1667 Note that this does *not* repair future affected revisions; that requires
1669 Note that this does *not* repair future affected revisions; that requires
1668 a separate fix at the exchange level, which was introduced in
1670 a separate fix at the exchange level, which was introduced in
1669 Mercurial 5.9.1.
1671 Mercurial 5.9.1.
1670
1672
1671 There is a `--paranoid` flag to test that the fast implementation is correct
1673 There is a `--paranoid` flag to test that the fast implementation is correct
1672 by checking it against the slow implementation. Since this matter is quite
1674 by checking it against the slow implementation. Since this matter is quite
1673 urgent and testing every edge-case is probably quite costly, we use this
1675 urgent and testing every edge-case is probably quite costly, we use this
1674 method to test on large repositories as a fuzzing method of sorts.
1676 method to test on large repositories as a fuzzing method of sorts.
1675 """
1677 """
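# For illustration, a report line for one filelog with two broken revisions
# would look like (hashes abbreviated, filename made up):
#   <node-hex-1>,<node-hex-2> data/widget.py.i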
1676 cmdutil.check_incompatible_arguments(
1678 cmdutil.check_incompatible_arguments(
1677 opts, 'to_report', ['from_report', 'dry_run']
1679 opts, 'to_report', ['from_report', 'dry_run']
1678 )
1680 )
1679 dry_run = opts.get('dry_run')
1681 dry_run = opts.get('dry_run')
1680 to_report = opts.get('to_report')
1682 to_report = opts.get('to_report')
1681 from_report = opts.get('from_report')
1683 from_report = opts.get('from_report')
1682 paranoid = opts.get('paranoid')
1684 paranoid = opts.get('paranoid')
1683 # TODO maybe add filelog pattern and revision pattern parameters to help
1685 # TODO maybe add filelog pattern and revision pattern parameters to help
1684 # narrow down the search for users that know what they're looking for?
1686 # narrow down the search for users that know what they're looking for?
1685
1687
1686 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1688 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1687 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1689 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1688 raise error.Abort(_(msg))
1690 raise error.Abort(_(msg))
1689
1691
1690 rewrite.repair_issue6528(
1692 rewrite.repair_issue6528(
1691 ui,
1693 ui,
1692 repo,
1694 repo,
1693 dry_run=dry_run,
1695 dry_run=dry_run,
1694 to_report=to_report,
1696 to_report=to_report,
1695 from_report=from_report,
1697 from_report=from_report,
1696 paranoid=paranoid,
1698 paranoid=paranoid,
1697 )
1699 )
1698
1700
1699
1701
1700 @command(b'debugformat', [] + cmdutil.formatteropts)
1702 @command(b'debugformat', [] + cmdutil.formatteropts)
1701 def debugformat(ui, repo, **opts):
1703 def debugformat(ui, repo, **opts):
1702 """display format information about the current repository
1704 """display format information about the current repository
1703
1705
1706 Use --verbose to get extra information about the current config value and the
1708 Use --verbose to get extra information about the current config value and the
1707 Mercurial default.
1709 Mercurial default.
1706 opts = pycompat.byteskwargs(opts)
1708 opts = pycompat.byteskwargs(opts)
1707 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1709 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1708 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1710 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1709
1711
1710 def makeformatname(name):
1712 def makeformatname(name):
1711 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1713 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1712
1714
1713 fm = ui.formatter(b'debugformat', opts)
1715 fm = ui.formatter(b'debugformat', opts)
1714 if fm.isplain():
1716 if fm.isplain():
1715
1717
1716 def formatvalue(value):
1718 def formatvalue(value):
1717 if util.safehasattr(value, b'startswith'):
1719 if util.safehasattr(value, b'startswith'):
1718 return value
1720 return value
1719 if value:
1721 if value:
1720 return b'yes'
1722 return b'yes'
1721 else:
1723 else:
1722 return b'no'
1724 return b'no'
1723
1725
1724 else:
1726 else:
1725 formatvalue = pycompat.identity
1727 formatvalue = pycompat.identity
1726
1728
1727 fm.plain(b'format-variant')
1729 fm.plain(b'format-variant')
1728 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1730 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1729 fm.plain(b' repo')
1731 fm.plain(b' repo')
1730 if ui.verbose:
1732 if ui.verbose:
1731 fm.plain(b' config default')
1733 fm.plain(b' config default')
1732 fm.plain(b'\n')
1734 fm.plain(b'\n')
1733 for fv in upgrade.allformatvariant:
1735 for fv in upgrade.allformatvariant:
1734 fm.startitem()
1736 fm.startitem()
1735 repovalue = fv.fromrepo(repo)
1737 repovalue = fv.fromrepo(repo)
1736 configvalue = fv.fromconfig(repo)
1738 configvalue = fv.fromconfig(repo)
1737
1739
1738 if repovalue != configvalue:
1740 if repovalue != configvalue:
1739 namelabel = b'formatvariant.name.mismatchconfig'
1741 namelabel = b'formatvariant.name.mismatchconfig'
1740 repolabel = b'formatvariant.repo.mismatchconfig'
1742 repolabel = b'formatvariant.repo.mismatchconfig'
1741 elif repovalue != fv.default:
1743 elif repovalue != fv.default:
1742 namelabel = b'formatvariant.name.mismatchdefault'
1744 namelabel = b'formatvariant.name.mismatchdefault'
1743 repolabel = b'formatvariant.repo.mismatchdefault'
1745 repolabel = b'formatvariant.repo.mismatchdefault'
1744 else:
1746 else:
1745 namelabel = b'formatvariant.name.uptodate'
1747 namelabel = b'formatvariant.name.uptodate'
1746 repolabel = b'formatvariant.repo.uptodate'
1748 repolabel = b'formatvariant.repo.uptodate'
1747
1749
1748 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1750 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1749 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1751 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1750 if fv.default != configvalue:
1752 if fv.default != configvalue:
1751 configlabel = b'formatvariant.config.special'
1753 configlabel = b'formatvariant.config.special'
1752 else:
1754 else:
1753 configlabel = b'formatvariant.config.default'
1755 configlabel = b'formatvariant.config.default'
1754 fm.condwrite(
1756 fm.condwrite(
1755 ui.verbose,
1757 ui.verbose,
1756 b'config',
1758 b'config',
1757 b' %6s',
1759 b' %6s',
1758 formatvalue(configvalue),
1760 formatvalue(configvalue),
1759 label=configlabel,
1761 label=configlabel,
1760 )
1762 )
1761 fm.condwrite(
1763 fm.condwrite(
1762 ui.verbose,
1764 ui.verbose,
1763 b'default',
1765 b'default',
1764 b' %7s',
1766 b' %7s',
1765 formatvalue(fv.default),
1767 formatvalue(fv.default),
1766 label=b'formatvariant.default',
1768 label=b'formatvariant.default',
1767 )
1769 )
1768 fm.plain(b'\n')
1770 fm.plain(b'\n')
1769 fm.end()
1771 fm.end()
1770
1772
1771
1773
1772 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1774 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1773 def debugfsinfo(ui, path=b"."):
1775 def debugfsinfo(ui, path=b"."):
1774 """show information detected about current filesystem"""
1776 """show information detected about current filesystem"""
1775 ui.writenoi18n(b'path: %s\n' % path)
1777 ui.writenoi18n(b'path: %s\n' % path)
1776 ui.writenoi18n(
1778 ui.writenoi18n(
1777 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1779 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1778 )
1780 )
1779 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1781 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1780 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1782 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1781 ui.writenoi18n(
1783 ui.writenoi18n(
1782 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1784 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1783 )
1785 )
1784 ui.writenoi18n(
1786 ui.writenoi18n(
1785 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1787 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1786 )
1788 )
1787 casesensitive = b'(unknown)'
1789 casesensitive = b'(unknown)'
1788 try:
1790 try:
1789 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1791 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1790 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1792 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1791 except OSError:
1793 except OSError:
1792 pass
1794 pass
1793 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1795 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1794
1796
1795
1797
1796 @command(
1798 @command(
1797 b'debuggetbundle',
1799 b'debuggetbundle',
1798 [
1800 [
1799 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1801 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1800 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1802 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1801 (
1803 (
1802 b't',
1804 b't',
1803 b'type',
1805 b'type',
1804 b'bzip2',
1806 b'bzip2',
1805 _(b'bundle compression type to use'),
1807 _(b'bundle compression type to use'),
1806 _(b'TYPE'),
1808 _(b'TYPE'),
1807 ),
1809 ),
1808 ],
1810 ],
1809 _(b'REPO FILE [-H|-C ID]...'),
1811 _(b'REPO FILE [-H|-C ID]...'),
1810 norepo=True,
1812 norepo=True,
1811 )
1813 )
1812 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1814 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1813 """retrieves a bundle from a repo
1815 """retrieves a bundle from a repo
1814
1816
1815 Every ID must be a full-length hex node id string. Saves the bundle to the
1817 Every ID must be a full-length hex node id string. Saves the bundle to the
1816 given file.
1818 given file.
1817 """
1819 """
1818 opts = pycompat.byteskwargs(opts)
1820 opts = pycompat.byteskwargs(opts)
1819 repo = hg.peer(ui, opts, repopath)
1821 repo = hg.peer(ui, opts, repopath)
1820 if not repo.capable(b'getbundle'):
1822 if not repo.capable(b'getbundle'):
1821 raise error.Abort(b"getbundle() not supported by target repository")
1823 raise error.Abort(b"getbundle() not supported by target repository")
1822 args = {}
1824 args = {}
1823 if common:
1825 if common:
1824 args['common'] = [bin(s) for s in common]
1826 args['common'] = [bin(s) for s in common]
1825 if head:
1827 if head:
1826 args['heads'] = [bin(s) for s in head]
1828 args['heads'] = [bin(s) for s in head]
1827 # TODO: get desired bundlecaps from command line.
1829 # TODO: get desired bundlecaps from command line.
1828 args['bundlecaps'] = None
1830 args['bundlecaps'] = None
1829 bundle = repo.getbundle(b'debug', **args)
1831 bundle = repo.getbundle(b'debug', **args)
1830
1832
1831 bundletype = opts.get(b'type', b'bzip2').lower()
1833 bundletype = opts.get(b'type', b'bzip2').lower()
1832 btypes = {
1834 btypes = {
1833 b'none': b'HG10UN',
1835 b'none': b'HG10UN',
1834 b'bzip2': b'HG10BZ',
1836 b'bzip2': b'HG10BZ',
1835 b'gzip': b'HG10GZ',
1837 b'gzip': b'HG10GZ',
1836 b'bundle2': b'HG20',
1838 b'bundle2': b'HG20',
1837 }
1839 }
1838 bundletype = btypes.get(bundletype)
1840 bundletype = btypes.get(bundletype)
1839 if bundletype not in bundle2.bundletypes:
1841 if bundletype not in bundle2.bundletypes:
1840 raise error.Abort(_(b'unknown bundle type specified with --type'))
1842 raise error.Abort(_(b'unknown bundle type specified with --type'))
1841 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1843 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1842
1844
1843
1845
1844 @command(b'debugignore', [], b'[FILE]')
1846 @command(b'debugignore', [], b'[FILE]')
1845 def debugignore(ui, repo, *files, **opts):
1847 def debugignore(ui, repo, *files, **opts):
1846 """display the combined ignore pattern and information about ignored files
1848 """display the combined ignore pattern and information about ignored files
1847
1849
1848 With no argument display the combined ignore pattern.
1850 With no argument display the combined ignore pattern.
1849
1851
1850 Given space-separated file names, show whether each file is ignored and,
1852 Given space-separated file names, show whether each file is ignored and,
1851 if so, the ignore rule (file and line number) that matched it.
1853 if so, the ignore rule (file and line number) that matched it.
1852 """
1854 """
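# A hypothetical example: "hg debugignore build/output.o" reports whether the
# file is ignored and, if so, which ignore file and line matched it.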
1853 ignore = repo.dirstate._ignore
1855 ignore = repo.dirstate._ignore
1854 if not files:
1856 if not files:
1855 # Show all the patterns
1857 # Show all the patterns
1856 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1858 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1857 else:
1859 else:
1858 m = scmutil.match(repo[None], pats=files)
1860 m = scmutil.match(repo[None], pats=files)
1859 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1861 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1860 for f in m.files():
1862 for f in m.files():
1861 nf = util.normpath(f)
1863 nf = util.normpath(f)
1862 ignored = None
1864 ignored = None
1863 ignoredata = None
1865 ignoredata = None
1864 if nf != b'.':
1866 if nf != b'.':
1865 if ignore(nf):
1867 if ignore(nf):
1866 ignored = nf
1868 ignored = nf
1867 ignoredata = repo.dirstate._ignorefileandline(nf)
1869 ignoredata = repo.dirstate._ignorefileandline(nf)
1868 else:
1870 else:
1869 for p in pathutil.finddirs(nf):
1871 for p in pathutil.finddirs(nf):
1870 if ignore(p):
1872 if ignore(p):
1871 ignored = p
1873 ignored = p
1872 ignoredata = repo.dirstate._ignorefileandline(p)
1874 ignoredata = repo.dirstate._ignorefileandline(p)
1873 break
1875 break
1874 if ignored:
1876 if ignored:
1875 if ignored == nf:
1877 if ignored == nf:
1876 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1878 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1877 else:
1879 else:
1878 ui.write(
1880 ui.write(
1879 _(
1881 _(
1880 b"%s is ignored because of "
1882 b"%s is ignored because of "
1881 b"containing directory %s\n"
1883 b"containing directory %s\n"
1882 )
1884 )
1883 % (uipathfn(f), ignored)
1885 % (uipathfn(f), ignored)
1884 )
1886 )
1885 ignorefile, lineno, line = ignoredata
1887 ignorefile, lineno, line = ignoredata
1886 ui.write(
1888 ui.write(
1887 _(b"(ignore rule in %s, line %d: '%s')\n")
1889 _(b"(ignore rule in %s, line %d: '%s')\n")
1888 % (ignorefile, lineno, line)
1890 % (ignorefile, lineno, line)
1889 )
1891 )
1890 else:
1892 else:
1891 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1893 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1892
1894
1893
1895
1894 @command(
1896 @command(
1895 b'debug-revlog-index|debugindex',
1897 b'debug-revlog-index|debugindex',
1896 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1898 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1897 _(b'-c|-m|FILE'),
1899 _(b'-c|-m|FILE'),
1898 )
1900 )
1899 def debugindex(ui, repo, file_=None, **opts):
1901 def debugindex(ui, repo, file_=None, **opts):
1900 """dump index data for a revlog"""
1902 """dump index data for a revlog"""
1901 opts = pycompat.byteskwargs(opts)
1903 opts = pycompat.byteskwargs(opts)
1902 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1904 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1903
1905
1904 fm = ui.formatter(b'debugindex', opts)
1906 fm = ui.formatter(b'debugindex', opts)
1905
1907
1906 revlog = getattr(store, b'_revlog', store)
1908 revlog = getattr(store, b'_revlog', store)
1907
1909
1908 return revlog_debug.debug_index(
1910 return revlog_debug.debug_index(
1909 ui,
1911 ui,
1910 repo,
1912 repo,
1911 formatter=fm,
1913 formatter=fm,
1912 revlog=revlog,
1914 revlog=revlog,
1913 full_node=ui.debugflag,
1915 full_node=ui.debugflag,
1914 )
1916 )
1915
1917
1916
1918
1917 @command(
1919 @command(
1918 b'debugindexdot',
1920 b'debugindexdot',
1919 cmdutil.debugrevlogopts,
1921 cmdutil.debugrevlogopts,
1920 _(b'-c|-m|FILE'),
1922 _(b'-c|-m|FILE'),
1921 optionalrepo=True,
1923 optionalrepo=True,
1922 )
1924 )
1923 def debugindexdot(ui, repo, file_=None, **opts):
1925 def debugindexdot(ui, repo, file_=None, **opts):
1924 """dump an index DAG as a graphviz dot file"""
1926 """dump an index DAG as a graphviz dot file"""
1925 opts = pycompat.byteskwargs(opts)
1927 opts = pycompat.byteskwargs(opts)
1926 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1928 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1927 ui.writenoi18n(b"digraph G {\n")
1929 ui.writenoi18n(b"digraph G {\n")
1928 for i in r:
1930 for i in r:
1929 node = r.node(i)
1931 node = r.node(i)
1930 pp = r.parents(node)
1932 pp = r.parents(node)
1931 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1933 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1932 if pp[1] != repo.nullid:
1934 if pp[1] != repo.nullid:
1933 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1935 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1934 ui.write(b"}\n")
1936 ui.write(b"}\n")
1935
1937
1936
1938
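# Illustrative usage sketch (paths are hypothetical): the "digraph G"
# output written above is plain Graphviz, so it can be piped to dot to
# render the revlog DAG; each "parent -> child" edge uses revision
# numbers, and merges show two incoming edges.
#
#   $ hg debugindexdot -c | dot -Tsvg > changelog-dag.svg
#   $ hg debugindexdot path/to/file.txt | dot -Tpng > file-dag.png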
1937 @command(b'debugindexstats', [])
1939 @command(b'debugindexstats', [])
1938 def debugindexstats(ui, repo):
1940 def debugindexstats(ui, repo):
1939 """show stats related to the changelog index"""
1941 """show stats related to the changelog index"""
1940 repo.changelog.shortest(repo.nullid, 1)
1942 repo.changelog.shortest(repo.nullid, 1)
1941 index = repo.changelog.index
1943 index = repo.changelog.index
1942 if not util.safehasattr(index, b'stats'):
1944 if not util.safehasattr(index, b'stats'):
1943 raise error.Abort(_(b'debugindexstats only works with native code'))
1945 raise error.Abort(_(b'debugindexstats only works with native code'))
1944 for k, v in sorted(index.stats().items()):
1946 for k, v in sorted(index.stats().items()):
1945 ui.write(b'%s: %d\n' % (k, v))
1947 ui.write(b'%s: %d\n' % (k, v))
1946
1948
1947
1949
1948 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1950 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1949 def debuginstall(ui, **opts):
1951 def debuginstall(ui, **opts):
1950 """test Mercurial installation
1952 """test Mercurial installation
1951
1953
1952 Returns 0 on success.
1954 Returns 0 on success.
1953 """
1955 """
1954 opts = pycompat.byteskwargs(opts)
1956 opts = pycompat.byteskwargs(opts)
1955
1957
1956 problems = 0
1958 problems = 0
1957
1959
1958 fm = ui.formatter(b'debuginstall', opts)
1960 fm = ui.formatter(b'debuginstall', opts)
1959 fm.startitem()
1961 fm.startitem()
1960
1962
1961 # encoding might be unknown or wrong. don't translate these messages.
1963 # encoding might be unknown or wrong. don't translate these messages.
1962 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1964 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1963 err = None
1965 err = None
1964 try:
1966 try:
1965 codecs.lookup(pycompat.sysstr(encoding.encoding))
1967 codecs.lookup(pycompat.sysstr(encoding.encoding))
1966 except LookupError as inst:
1968 except LookupError as inst:
1967 err = stringutil.forcebytestr(inst)
1969 err = stringutil.forcebytestr(inst)
1968 problems += 1
1970 problems += 1
1969 fm.condwrite(
1971 fm.condwrite(
1970 err,
1972 err,
1971 b'encodingerror',
1973 b'encodingerror',
1972 b" %s\n (check that your locale is properly set)\n",
1974 b" %s\n (check that your locale is properly set)\n",
1973 err,
1975 err,
1974 )
1976 )
1975
1977
1976 # Python
1978 # Python
1977 pythonlib = None
1979 pythonlib = None
1978 if util.safehasattr(os, '__file__'):
1980 if util.safehasattr(os, '__file__'):
1979 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1981 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1980 elif getattr(sys, 'oxidized', False):
1982 elif getattr(sys, 'oxidized', False):
1981 pythonlib = pycompat.sysexecutable
1983 pythonlib = pycompat.sysexecutable
1982
1984
1983 fm.write(
1985 fm.write(
1984 b'pythonexe',
1986 b'pythonexe',
1985 _(b"checking Python executable (%s)\n"),
1987 _(b"checking Python executable (%s)\n"),
1986 pycompat.sysexecutable or _(b"unknown"),
1988 pycompat.sysexecutable or _(b"unknown"),
1987 )
1989 )
1988 fm.write(
1990 fm.write(
1989 b'pythonimplementation',
1991 b'pythonimplementation',
1990 _(b"checking Python implementation (%s)\n"),
1992 _(b"checking Python implementation (%s)\n"),
1991 pycompat.sysbytes(platform.python_implementation()),
1993 pycompat.sysbytes(platform.python_implementation()),
1992 )
1994 )
1993 fm.write(
1995 fm.write(
1994 b'pythonver',
1996 b'pythonver',
1995 _(b"checking Python version (%s)\n"),
1997 _(b"checking Python version (%s)\n"),
1996 (b"%d.%d.%d" % sys.version_info[:3]),
1998 (b"%d.%d.%d" % sys.version_info[:3]),
1997 )
1999 )
1998 fm.write(
2000 fm.write(
1999 b'pythonlib',
2001 b'pythonlib',
2000 _(b"checking Python lib (%s)...\n"),
2002 _(b"checking Python lib (%s)...\n"),
2001 pythonlib or _(b"unknown"),
2003 pythonlib or _(b"unknown"),
2002 )
2004 )
2003
2005
2004 try:
2006 try:
2005 from . import rustext # pytype: disable=import-error
2007 from . import rustext # pytype: disable=import-error
2006
2008
2007 rustext.__doc__ # trigger lazy import
2009 rustext.__doc__ # trigger lazy import
2008 except ImportError:
2010 except ImportError:
2009 rustext = None
2011 rustext = None
2010
2012
2011 security = set(sslutil.supportedprotocols)
2013 security = set(sslutil.supportedprotocols)
2012 if sslutil.hassni:
2014 if sslutil.hassni:
2013 security.add(b'sni')
2015 security.add(b'sni')
2014
2016
2015 fm.write(
2017 fm.write(
2016 b'pythonsecurity',
2018 b'pythonsecurity',
2017 _(b"checking Python security support (%s)\n"),
2019 _(b"checking Python security support (%s)\n"),
2018 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2020 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2019 )
2021 )
2020
2022
2021 # These are warnings, not errors. So don't increment problem count. This
2023 # These are warnings, not errors. So don't increment problem count. This
2022 # may change in the future.
2024 # may change in the future.
2023 if b'tls1.2' not in security:
2025 if b'tls1.2' not in security:
2024 fm.plain(
2026 fm.plain(
2025 _(
2027 _(
2026 b' TLS 1.2 not supported by Python install; '
2028 b' TLS 1.2 not supported by Python install; '
2027 b'network connections lack modern security\n'
2029 b'network connections lack modern security\n'
2028 )
2030 )
2029 )
2031 )
2030 if b'sni' not in security:
2032 if b'sni' not in security:
2031 fm.plain(
2033 fm.plain(
2032 _(
2034 _(
2033 b' SNI not supported by Python install; may have '
2035 b' SNI not supported by Python install; may have '
2034 b'connectivity issues with some servers\n'
2036 b'connectivity issues with some servers\n'
2035 )
2037 )
2036 )
2038 )
2037
2039
2038 fm.plain(
2040 fm.plain(
2039 _(
2041 _(
2040 b"checking Rust extensions (%s)\n"
2042 b"checking Rust extensions (%s)\n"
2041 % (b'missing' if rustext is None else b'installed')
2043 % (b'missing' if rustext is None else b'installed')
2042 ),
2044 ),
2043 )
2045 )
2044
2046
2045 # TODO print CA cert info
2047 # TODO print CA cert info
2046
2048
2047 # hg version
2049 # hg version
2048 hgver = util.version()
2050 hgver = util.version()
2049 fm.write(
2051 fm.write(
2050 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2052 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2051 )
2053 )
2052 fm.write(
2054 fm.write(
2053 b'hgverextra',
2055 b'hgverextra',
2054 _(b"checking Mercurial custom build (%s)\n"),
2056 _(b"checking Mercurial custom build (%s)\n"),
2055 b'+'.join(hgver.split(b'+')[1:]),
2057 b'+'.join(hgver.split(b'+')[1:]),
2056 )
2058 )
2057
2059
2058 # compiled modules
2060 # compiled modules
2059 hgmodules = None
2061 hgmodules = None
2060 if util.safehasattr(sys.modules[__name__], '__file__'):
2062 if util.safehasattr(sys.modules[__name__], '__file__'):
2061 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2063 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2062 elif getattr(sys, 'oxidized', False):
2064 elif getattr(sys, 'oxidized', False):
2063 hgmodules = pycompat.sysexecutable
2065 hgmodules = pycompat.sysexecutable
2064
2066
2065 fm.write(
2067 fm.write(
2066 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2068 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2067 )
2069 )
2068 fm.write(
2070 fm.write(
2069 b'hgmodules',
2071 b'hgmodules',
2070 _(b"checking installed modules (%s)...\n"),
2072 _(b"checking installed modules (%s)...\n"),
2071 hgmodules or _(b"unknown"),
2073 hgmodules or _(b"unknown"),
2072 )
2074 )
2073
2075
2074 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2076 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2075 rustext = rustandc # for now, that's the only case
2077 rustext = rustandc # for now, that's the only case
2076 cext = policy.policy in (b'c', b'allow') or rustandc
2078 cext = policy.policy in (b'c', b'allow') or rustandc
2077 nopure = cext or rustext
2079 nopure = cext or rustext
2078 if nopure:
2080 if nopure:
2079 err = None
2081 err = None
2080 try:
2082 try:
2081 if cext:
2083 if cext:
2082 from .cext import ( # pytype: disable=import-error
2084 from .cext import ( # pytype: disable=import-error
2083 base85,
2085 base85,
2084 bdiff,
2086 bdiff,
2085 mpatch,
2087 mpatch,
2086 osutil,
2088 osutil,
2087 )
2089 )
2088
2090
2089 # quiet pyflakes
2091 # quiet pyflakes
2090 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2092 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2091 if rustext:
2093 if rustext:
2092 from .rustext import ( # pytype: disable=import-error
2094 from .rustext import ( # pytype: disable=import-error
2093 ancestor,
2095 ancestor,
2094 dirstate,
2096 dirstate,
2095 )
2097 )
2096
2098
2097 dir(ancestor), dir(dirstate) # quiet pyflakes
2099 dir(ancestor), dir(dirstate) # quiet pyflakes
2098 except Exception as inst:
2100 except Exception as inst:
2099 err = stringutil.forcebytestr(inst)
2101 err = stringutil.forcebytestr(inst)
2100 problems += 1
2102 problems += 1
2101 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2103 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2102
2104
2103 compengines = util.compengines._engines.values()
2105 compengines = util.compengines._engines.values()
2104 fm.write(
2106 fm.write(
2105 b'compengines',
2107 b'compengines',
2106 _(b'checking registered compression engines (%s)\n'),
2108 _(b'checking registered compression engines (%s)\n'),
2107 fm.formatlist(
2109 fm.formatlist(
2108 sorted(e.name() for e in compengines),
2110 sorted(e.name() for e in compengines),
2109 name=b'compengine',
2111 name=b'compengine',
2110 fmt=b'%s',
2112 fmt=b'%s',
2111 sep=b', ',
2113 sep=b', ',
2112 ),
2114 ),
2113 )
2115 )
2114 fm.write(
2116 fm.write(
2115 b'compenginesavail',
2117 b'compenginesavail',
2116 _(b'checking available compression engines (%s)\n'),
2118 _(b'checking available compression engines (%s)\n'),
2117 fm.formatlist(
2119 fm.formatlist(
2118 sorted(e.name() for e in compengines if e.available()),
2120 sorted(e.name() for e in compengines if e.available()),
2119 name=b'compengine',
2121 name=b'compengine',
2120 fmt=b'%s',
2122 fmt=b'%s',
2121 sep=b', ',
2123 sep=b', ',
2122 ),
2124 ),
2123 )
2125 )
2124 wirecompengines = compression.compengines.supportedwireengines(
2126 wirecompengines = compression.compengines.supportedwireengines(
2125 compression.SERVERROLE
2127 compression.SERVERROLE
2126 )
2128 )
2127 fm.write(
2129 fm.write(
2128 b'compenginesserver',
2130 b'compenginesserver',
2129 _(
2131 _(
2130 b'checking available compression engines '
2132 b'checking available compression engines '
2131 b'for wire protocol (%s)\n'
2133 b'for wire protocol (%s)\n'
2132 ),
2134 ),
2133 fm.formatlist(
2135 fm.formatlist(
2134 [e.name() for e in wirecompengines if e.wireprotosupport()],
2136 [e.name() for e in wirecompengines if e.wireprotosupport()],
2135 name=b'compengine',
2137 name=b'compengine',
2136 fmt=b'%s',
2138 fmt=b'%s',
2137 sep=b', ',
2139 sep=b', ',
2138 ),
2140 ),
2139 )
2141 )
2140 re2 = b'missing'
2142 re2 = b'missing'
2141 if util._re2:
2143 if util._re2:
2142 re2 = b'available'
2144 re2 = b'available'
2143 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2145 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2144 fm.data(re2=bool(util._re2))
2146 fm.data(re2=bool(util._re2))
2145
2147
2146 # templates
2148 # templates
2147 p = templater.templatedir()
2149 p = templater.templatedir()
2148 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2150 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2149 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2151 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2150 if p:
2152 if p:
2151 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2153 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2152 if m:
2154 if m:
2153 # template found, check if it is working
2155 # template found, check if it is working
2154 err = None
2156 err = None
2155 try:
2157 try:
2156 templater.templater.frommapfile(m)
2158 templater.templater.frommapfile(m)
2157 except Exception as inst:
2159 except Exception as inst:
2158 err = stringutil.forcebytestr(inst)
2160 err = stringutil.forcebytestr(inst)
2159 p = None
2161 p = None
2160 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2162 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2161 else:
2163 else:
2162 p = None
2164 p = None
2163 fm.condwrite(
2165 fm.condwrite(
2164 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2166 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2165 )
2167 )
2166 fm.condwrite(
2168 fm.condwrite(
2167 not m,
2169 not m,
2168 b'defaulttemplatenotfound',
2170 b'defaulttemplatenotfound',
2169 _(b" template '%s' not found\n"),
2171 _(b" template '%s' not found\n"),
2170 b"default",
2172 b"default",
2171 )
2173 )
2172 if not p:
2174 if not p:
2173 problems += 1
2175 problems += 1
2174 fm.condwrite(
2176 fm.condwrite(
2175 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2177 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2176 )
2178 )
2177
2179
2178 # editor
2180 # editor
2179 editor = ui.geteditor()
2181 editor = ui.geteditor()
2180 editor = util.expandpath(editor)
2182 editor = util.expandpath(editor)
2181 editorbin = procutil.shellsplit(editor)[0]
2183 editorbin = procutil.shellsplit(editor)[0]
2182 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2184 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2183 cmdpath = procutil.findexe(editorbin)
2185 cmdpath = procutil.findexe(editorbin)
2184 fm.condwrite(
2186 fm.condwrite(
2185 not cmdpath and editor == b'vi',
2187 not cmdpath and editor == b'vi',
2186 b'vinotfound',
2188 b'vinotfound',
2187 _(
2189 _(
2188 b" No commit editor set and can't find %s in PATH\n"
2190 b" No commit editor set and can't find %s in PATH\n"
2189 b" (specify a commit editor in your configuration"
2191 b" (specify a commit editor in your configuration"
2190 b" file)\n"
2192 b" file)\n"
2191 ),
2193 ),
2192 not cmdpath and editor == b'vi' and editorbin,
2194 not cmdpath and editor == b'vi' and editorbin,
2193 )
2195 )
2194 fm.condwrite(
2196 fm.condwrite(
2195 not cmdpath and editor != b'vi',
2197 not cmdpath and editor != b'vi',
2196 b'editornotfound',
2198 b'editornotfound',
2197 _(
2199 _(
2198 b" Can't find editor '%s' in PATH\n"
2200 b" Can't find editor '%s' in PATH\n"
2199 b" (specify a commit editor in your configuration"
2201 b" (specify a commit editor in your configuration"
2200 b" file)\n"
2202 b" file)\n"
2201 ),
2203 ),
2202 not cmdpath and editorbin,
2204 not cmdpath and editorbin,
2203 )
2205 )
2204 if not cmdpath and editor != b'vi':
2206 if not cmdpath and editor != b'vi':
2205 problems += 1
2207 problems += 1
2206
2208
2207 # check username
2209 # check username
2208 username = None
2210 username = None
2209 err = None
2211 err = None
2210 try:
2212 try:
2211 username = ui.username()
2213 username = ui.username()
2212 except error.Abort as e:
2214 except error.Abort as e:
2213 err = e.message
2215 err = e.message
2214 problems += 1
2216 problems += 1
2215
2217
2216 fm.condwrite(
2218 fm.condwrite(
2217 username, b'username', _(b"checking username (%s)\n"), username
2219 username, b'username', _(b"checking username (%s)\n"), username
2218 )
2220 )
2219 fm.condwrite(
2221 fm.condwrite(
2220 err,
2222 err,
2221 b'usernameerror',
2223 b'usernameerror',
2222 _(
2224 _(
2223 b"checking username...\n %s\n"
2225 b"checking username...\n %s\n"
2224 b" (specify a username in your configuration file)\n"
2226 b" (specify a username in your configuration file)\n"
2225 ),
2227 ),
2226 err,
2228 err,
2227 )
2229 )
2228
2230
2229 for name, mod in extensions.extensions():
2231 for name, mod in extensions.extensions():
2230 handler = getattr(mod, 'debuginstall', None)
2232 handler = getattr(mod, 'debuginstall', None)
2231 if handler is not None:
2233 if handler is not None:
2232 problems += handler(ui, fm)
2234 problems += handler(ui, fm)
2233
2235
2234 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2236 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2235 if not problems:
2237 if not problems:
2236 fm.data(problems=problems)
2238 fm.data(problems=problems)
2237 fm.condwrite(
2239 fm.condwrite(
2238 problems,
2240 problems,
2239 b'problems',
2241 b'problems',
2240 _(b"%d problems detected, please check your install!\n"),
2242 _(b"%d problems detected, please check your install!\n"),
2241 problems,
2243 problems,
2242 )
2244 )
2243 fm.end()
2245 fm.end()
2244
2246
2245 return problems
2247 return problems
2246
2248
2247
2249
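# Illustrative usage sketch: the checks above can be rendered through the
# formatter for scripting; the command's return value is the number of
# problems found, so 0 signals a healthy install.
#
#   $ hg debuginstall
#   $ hg debuginstall -T json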
2248 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2250 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2249 def debugknown(ui, repopath, *ids, **opts):
2251 def debugknown(ui, repopath, *ids, **opts):
2250 """test whether node ids are known to a repo
2252 """test whether node ids are known to a repo
2251
2253
2252 Every ID must be a full-length hex node id string. Returns a list of 0s
2254 Every ID must be a full-length hex node id string. Returns a list of 0s
2253 and 1s indicating unknown/known.
2255 and 1s indicating unknown/known.
2254 """
2256 """
2255 opts = pycompat.byteskwargs(opts)
2257 opts = pycompat.byteskwargs(opts)
2256 repo = hg.peer(ui, opts, repopath)
2258 repo = hg.peer(ui, opts, repopath)
2257 if not repo.capable(b'known'):
2259 if not repo.capable(b'known'):
2258 raise error.Abort(b"known() not supported by target repository")
2260 raise error.Abort(b"known() not supported by target repository")
2259 flags = repo.known([bin(s) for s in ids])
2261 flags = repo.known([bin(s) for s in ids])
2260 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2262 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2261
2263
2262
2264
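# Illustrative usage sketch (URL and node id are placeholders): query a
# peer for full-length hex node ids; the output is one 0/1 flag per id,
# in the order given.
#
#   $ hg debugknown https://example.com/repo <40-hex-node-id>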
2263 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2265 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2264 def debuglabelcomplete(ui, repo, *args):
2266 def debuglabelcomplete(ui, repo, *args):
2265 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2267 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2266 debugnamecomplete(ui, repo, *args)
2268 debugnamecomplete(ui, repo, *args)
2267
2269
2268
2270
2269 @command(
2271 @command(
2270 b'debuglocks',
2272 b'debuglocks',
2271 [
2273 [
2272 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2274 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2273 (
2275 (
2274 b'W',
2276 b'W',
2275 b'force-free-wlock',
2277 b'force-free-wlock',
2276 None,
2278 None,
2277 _(b'free the working state lock (DANGEROUS)'),
2279 _(b'free the working state lock (DANGEROUS)'),
2278 ),
2280 ),
2279 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2281 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2280 (
2282 (
2281 b'S',
2283 b'S',
2282 b'set-wlock',
2284 b'set-wlock',
2283 None,
2285 None,
2284 _(b'set the working state lock until stopped'),
2286 _(b'set the working state lock until stopped'),
2285 ),
2287 ),
2286 ],
2288 ],
2287 _(b'[OPTION]...'),
2289 _(b'[OPTION]...'),
2288 )
2290 )
2289 def debuglocks(ui, repo, **opts):
2291 def debuglocks(ui, repo, **opts):
2290 """show or modify state of locks
2292 """show or modify state of locks
2291
2293
2292 By default, this command will show which locks are held. This
2294 By default, this command will show which locks are held. This
2293 includes the user and process holding the lock, the amount of time
2295 includes the user and process holding the lock, the amount of time
2294 the lock has been held, and the machine name where the process is
2296 the lock has been held, and the machine name where the process is
2295 running if it's not local.
2297 running if it's not local.
2296
2298
2297 Locks protect the integrity of Mercurial's data, so should be
2299 Locks protect the integrity of Mercurial's data, so should be
2298 treated with care. System crashes or other interruptions may cause
2300 treated with care. System crashes or other interruptions may cause
2299 locks to not be properly released, though Mercurial will usually
2301 locks to not be properly released, though Mercurial will usually
2300 detect and remove such stale locks automatically.
2302 detect and remove such stale locks automatically.
2301
2303
2302 However, detecting stale locks may not always be possible (for
2304 However, detecting stale locks may not always be possible (for
2303 instance, on a shared filesystem). Removing locks may also be
2305 instance, on a shared filesystem). Removing locks may also be
2304 blocked by filesystem permissions.
2306 blocked by filesystem permissions.
2305
2307
2306 Setting a lock will prevent other commands from changing the data.
2308 Setting a lock will prevent other commands from changing the data.
2307 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2309 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2308 The set locks are removed when the command exits.
2310 The set locks are removed when the command exits.
2309
2311
2310 Returns 0 if no locks are held.
2312 Returns 0 if no locks are held.
2311
2313
2312 """
2314 """
2313
2315
2314 if opts.get('force_free_lock'):
2316 if opts.get('force_free_lock'):
2315 repo.svfs.tryunlink(b'lock')
2317 repo.svfs.tryunlink(b'lock')
2316 if opts.get('force_free_wlock'):
2318 if opts.get('force_free_wlock'):
2317 repo.vfs.tryunlink(b'wlock')
2319 repo.vfs.tryunlink(b'wlock')
2318 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2320 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2319 return 0
2321 return 0
2320
2322
2321 locks = []
2323 locks = []
2322 try:
2324 try:
2323 if opts.get('set_wlock'):
2325 if opts.get('set_wlock'):
2324 try:
2326 try:
2325 locks.append(repo.wlock(False))
2327 locks.append(repo.wlock(False))
2326 except error.LockHeld:
2328 except error.LockHeld:
2327 raise error.Abort(_(b'wlock is already held'))
2329 raise error.Abort(_(b'wlock is already held'))
2328 if opts.get('set_lock'):
2330 if opts.get('set_lock'):
2329 try:
2331 try:
2330 locks.append(repo.lock(False))
2332 locks.append(repo.lock(False))
2331 except error.LockHeld:
2333 except error.LockHeld:
2332 raise error.Abort(_(b'lock is already held'))
2334 raise error.Abort(_(b'lock is already held'))
2333 if len(locks):
2335 if len(locks):
2334 try:
2336 try:
2335 if ui.interactive():
2337 if ui.interactive():
2336 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2338 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2337 ui.promptchoice(prompt)
2339 ui.promptchoice(prompt)
2338 else:
2340 else:
2339 msg = b"%d locks held, waiting for signal\n"
2341 msg = b"%d locks held, waiting for signal\n"
2340 msg %= len(locks)
2342 msg %= len(locks)
2341 ui.status(msg)
2343 ui.status(msg)
2342 while True: # XXX wait for a signal
2344 while True: # XXX wait for a signal
2343 time.sleep(0.1)
2345 time.sleep(0.1)
2344 except KeyboardInterrupt:
2346 except KeyboardInterrupt:
2345 msg = b"signal-received releasing locks\n"
2347 msg = b"signal-received releasing locks\n"
2346 ui.status(msg)
2348 ui.status(msg)
2347 return 0
2349 return 0
2348 finally:
2350 finally:
2349 release(*locks)
2351 release(*locks)
2350
2352
2351 now = time.time()
2353 now = time.time()
2352 held = 0
2354 held = 0
2353
2355
2354 def report(vfs, name, method):
2356 def report(vfs, name, method):
2355 # this causes stale locks to get reaped for more accurate reporting
2357 # this causes stale locks to get reaped for more accurate reporting
2356 try:
2358 try:
2357 l = method(False)
2359 l = method(False)
2358 except error.LockHeld:
2360 except error.LockHeld:
2359 l = None
2361 l = None
2360
2362
2361 if l:
2363 if l:
2362 l.release()
2364 l.release()
2363 else:
2365 else:
2364 try:
2366 try:
2365 st = vfs.lstat(name)
2367 st = vfs.lstat(name)
2366 age = now - st[stat.ST_MTIME]
2368 age = now - st[stat.ST_MTIME]
2367 user = util.username(st.st_uid)
2369 user = util.username(st.st_uid)
2368 locker = vfs.readlock(name)
2370 locker = vfs.readlock(name)
2369 if b":" in locker:
2371 if b":" in locker:
2370 host, pid = locker.split(b':')
2372 host, pid = locker.split(b':')
2371 if host == socket.gethostname():
2373 if host == socket.gethostname():
2372 locker = b'user %s, process %s' % (user or b'None', pid)
2374 locker = b'user %s, process %s' % (user or b'None', pid)
2373 else:
2375 else:
2374 locker = b'user %s, process %s, host %s' % (
2376 locker = b'user %s, process %s, host %s' % (
2375 user or b'None',
2377 user or b'None',
2376 pid,
2378 pid,
2377 host,
2379 host,
2378 )
2380 )
2379 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2381 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2380 return 1
2382 return 1
2381 except FileNotFoundError:
2383 except FileNotFoundError:
2382 pass
2384 pass
2383
2385
2384 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2386 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2385 return 0
2387 return 0
2386
2388
2387 held += report(repo.svfs, b"lock", repo.lock)
2389 held += report(repo.svfs, b"lock", repo.lock)
2388 held += report(repo.vfs, b"wlock", repo.wlock)
2390 held += report(repo.vfs, b"wlock", repo.wlock)
2389
2391
2390 return held
2392 return held
2391
2393
2392
2394
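# Illustrative usage sketch: report or manipulate the store and working
# state locks handled above; the return value is the number of locks
# currently held.
#
#   $ hg debuglocks              # show 'lock' and 'wlock' status
#   $ hg debuglocks --set-lock   # hold the store lock until interrupted
#   $ hg debuglocks -L           # force-free a stale store lock (DANGEROUS)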
2393 @command(
2395 @command(
2394 b'debugmanifestfulltextcache',
2396 b'debugmanifestfulltextcache',
2395 [
2397 [
2396 (b'', b'clear', False, _(b'clear the cache')),
2398 (b'', b'clear', False, _(b'clear the cache')),
2397 (
2399 (
2398 b'a',
2400 b'a',
2399 b'add',
2401 b'add',
2400 [],
2402 [],
2401 _(b'add the given manifest nodes to the cache'),
2403 _(b'add the given manifest nodes to the cache'),
2402 _(b'NODE'),
2404 _(b'NODE'),
2403 ),
2405 ),
2404 ],
2406 ],
2405 b'',
2407 b'',
2406 )
2408 )
2407 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2409 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2408 """show, clear or amend the contents of the manifest fulltext cache"""
2410 """show, clear or amend the contents of the manifest fulltext cache"""
2409
2411
2410 def getcache():
2412 def getcache():
2411 r = repo.manifestlog.getstorage(b'')
2413 r = repo.manifestlog.getstorage(b'')
2412 try:
2414 try:
2413 return r._fulltextcache
2415 return r._fulltextcache
2414 except AttributeError:
2416 except AttributeError:
2415 msg = _(
2417 msg = _(
2416 b"Current revlog implementation doesn't appear to have a "
2418 b"Current revlog implementation doesn't appear to have a "
2417 b"manifest fulltext cache\n"
2419 b"manifest fulltext cache\n"
2418 )
2420 )
2419 raise error.Abort(msg)
2421 raise error.Abort(msg)
2420
2422
2421 if opts.get('clear'):
2423 if opts.get('clear'):
2422 with repo.wlock():
2424 with repo.wlock():
2423 cache = getcache()
2425 cache = getcache()
2424 cache.clear(clear_persisted_data=True)
2426 cache.clear(clear_persisted_data=True)
2425 return
2427 return
2426
2428
2427 if add:
2429 if add:
2428 with repo.wlock():
2430 with repo.wlock():
2429 m = repo.manifestlog
2431 m = repo.manifestlog
2430 store = m.getstorage(b'')
2432 store = m.getstorage(b'')
2431 for n in add:
2433 for n in add:
2432 try:
2434 try:
2433 manifest = m[store.lookup(n)]
2435 manifest = m[store.lookup(n)]
2434 except error.LookupError as e:
2436 except error.LookupError as e:
2435 raise error.Abort(
2437 raise error.Abort(
2436 bytes(e), hint=b"Check your manifest node id"
2438 bytes(e), hint=b"Check your manifest node id"
2437 )
2439 )
2438 manifest.read() # stores revision in cache too
2440 manifest.read() # stores revision in cache too
2439 return
2441 return
2440
2442
2441 cache = getcache()
2443 cache = getcache()
2442 if not len(cache):
2444 if not len(cache):
2443 ui.write(_(b'cache empty\n'))
2445 ui.write(_(b'cache empty\n'))
2444 else:
2446 else:
2445 ui.write(
2447 ui.write(
2446 _(
2448 _(
2447 b'cache contains %d manifest entries, in order of most to '
2449 b'cache contains %d manifest entries, in order of most to '
2448 b'least recent:\n'
2450 b'least recent:\n'
2449 )
2451 )
2450 % (len(cache),)
2452 % (len(cache),)
2451 )
2453 )
2452 totalsize = 0
2454 totalsize = 0
2453 for nodeid in cache:
2455 for nodeid in cache:
2454 # Use cache.peek to not update the LRU order
2456 # Use cache.peek to not update the LRU order
2455 data = cache.peek(nodeid)
2457 data = cache.peek(nodeid)
2456 size = len(data)
2458 size = len(data)
2457 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2459 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2458 ui.write(
2460 ui.write(
2459 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2461 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2460 )
2462 )
2461 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2463 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2462 ui.write(
2464 ui.write(
2463 _(b'total cache data size %s, on-disk %s\n')
2465 _(b'total cache data size %s, on-disk %s\n')
2464 % (util.bytecount(totalsize), util.bytecount(ondisk))
2466 % (util.bytecount(totalsize), util.bytecount(ondisk))
2465 )
2467 )
2466
2468
2467
2469
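# Illustrative usage sketch (the node id is a placeholder): show, seed or
# clear the manifest fulltext cache using the options declared above.
#
#   $ hg debugmanifestfulltextcache
#   $ hg debugmanifestfulltextcache --add <manifest-node-hex>
#   $ hg debugmanifestfulltextcache --clear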
2468 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2470 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2469 def debugmergestate(ui, repo, *args, **opts):
2471 def debugmergestate(ui, repo, *args, **opts):
2470 """print merge state
2472 """print merge state
2471
2473
2472 Use --verbose to print out information about whether v1 or v2 merge state
2474 Use --verbose to print out information about whether v1 or v2 merge state
2473 was chosen."""
2475 was chosen."""
2474
2476
2475 if ui.verbose:
2477 if ui.verbose:
2476 ms = mergestatemod.mergestate(repo)
2478 ms = mergestatemod.mergestate(repo)
2477
2479
2478 # sort so that reasonable information is on top
2480 # sort so that reasonable information is on top
2479 v1records = ms._readrecordsv1()
2481 v1records = ms._readrecordsv1()
2480 v2records = ms._readrecordsv2()
2482 v2records = ms._readrecordsv2()
2481
2483
2482 if not v1records and not v2records:
2484 if not v1records and not v2records:
2483 pass
2485 pass
2484 elif not v2records:
2486 elif not v2records:
2485 ui.writenoi18n(b'no version 2 merge state\n')
2487 ui.writenoi18n(b'no version 2 merge state\n')
2486 elif ms._v1v2match(v1records, v2records):
2488 elif ms._v1v2match(v1records, v2records):
2487 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2489 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2488 else:
2490 else:
2489 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2491 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2490
2492
2491 opts = pycompat.byteskwargs(opts)
2493 opts = pycompat.byteskwargs(opts)
2492 if not opts[b'template']:
2494 if not opts[b'template']:
2493 opts[b'template'] = (
2495 opts[b'template'] = (
2494 b'{if(commits, "", "no merge state found\n")}'
2496 b'{if(commits, "", "no merge state found\n")}'
2495 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2497 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2496 b'{files % "file: {path} (state \\"{state}\\")\n'
2498 b'{files % "file: {path} (state \\"{state}\\")\n'
2497 b'{if(local_path, "'
2499 b'{if(local_path, "'
2498 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2500 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2499 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2501 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2500 b' other path: {other_path} (node {other_node})\n'
2502 b' other path: {other_path} (node {other_node})\n'
2501 b'")}'
2503 b'")}'
2502 b'{if(rename_side, "'
2504 b'{if(rename_side, "'
2503 b' rename side: {rename_side}\n'
2505 b' rename side: {rename_side}\n'
2504 b' renamed path: {renamed_path}\n'
2506 b' renamed path: {renamed_path}\n'
2505 b'")}'
2507 b'")}'
2506 b'{extras % " extra: {key} = {value}\n"}'
2508 b'{extras % " extra: {key} = {value}\n"}'
2507 b'"}'
2509 b'"}'
2508 b'{extras % "extra: {file} ({key} = {value})\n"}'
2510 b'{extras % "extra: {file} ({key} = {value})\n"}'
2509 )
2511 )
2510
2512
2511 ms = mergestatemod.mergestate.read(repo)
2513 ms = mergestatemod.mergestate.read(repo)
2512
2514
2513 fm = ui.formatter(b'debugmergestate', opts)
2515 fm = ui.formatter(b'debugmergestate', opts)
2514 fm.startitem()
2516 fm.startitem()
2515
2517
2516 fm_commits = fm.nested(b'commits')
2518 fm_commits = fm.nested(b'commits')
2517 if ms.active():
2519 if ms.active():
2518 for name, node, label_index in (
2520 for name, node, label_index in (
2519 (b'local', ms.local, 0),
2521 (b'local', ms.local, 0),
2520 (b'other', ms.other, 1),
2522 (b'other', ms.other, 1),
2521 ):
2523 ):
2522 fm_commits.startitem()
2524 fm_commits.startitem()
2523 fm_commits.data(name=name)
2525 fm_commits.data(name=name)
2524 fm_commits.data(node=hex(node))
2526 fm_commits.data(node=hex(node))
2525 if ms._labels and len(ms._labels) > label_index:
2527 if ms._labels and len(ms._labels) > label_index:
2526 fm_commits.data(label=ms._labels[label_index])
2528 fm_commits.data(label=ms._labels[label_index])
2527 fm_commits.end()
2529 fm_commits.end()
2528
2530
2529 fm_files = fm.nested(b'files')
2531 fm_files = fm.nested(b'files')
2530 if ms.active():
2532 if ms.active():
2531 for f in ms:
2533 for f in ms:
2532 fm_files.startitem()
2534 fm_files.startitem()
2533 fm_files.data(path=f)
2535 fm_files.data(path=f)
2534 state = ms._state[f]
2536 state = ms._state[f]
2535 fm_files.data(state=state[0])
2537 fm_files.data(state=state[0])
2536 if state[0] in (
2538 if state[0] in (
2537 mergestatemod.MERGE_RECORD_UNRESOLVED,
2539 mergestatemod.MERGE_RECORD_UNRESOLVED,
2538 mergestatemod.MERGE_RECORD_RESOLVED,
2540 mergestatemod.MERGE_RECORD_RESOLVED,
2539 ):
2541 ):
2540 fm_files.data(local_key=state[1])
2542 fm_files.data(local_key=state[1])
2541 fm_files.data(local_path=state[2])
2543 fm_files.data(local_path=state[2])
2542 fm_files.data(ancestor_path=state[3])
2544 fm_files.data(ancestor_path=state[3])
2543 fm_files.data(ancestor_node=state[4])
2545 fm_files.data(ancestor_node=state[4])
2544 fm_files.data(other_path=state[5])
2546 fm_files.data(other_path=state[5])
2545 fm_files.data(other_node=state[6])
2547 fm_files.data(other_node=state[6])
2546 fm_files.data(local_flags=state[7])
2548 fm_files.data(local_flags=state[7])
2547 elif state[0] in (
2549 elif state[0] in (
2548 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2550 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2549 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2551 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2550 ):
2552 ):
2551 fm_files.data(renamed_path=state[1])
2553 fm_files.data(renamed_path=state[1])
2552 fm_files.data(rename_side=state[2])
2554 fm_files.data(rename_side=state[2])
2553 fm_extras = fm_files.nested(b'extras')
2555 fm_extras = fm_files.nested(b'extras')
2554 for k, v in sorted(ms.extras(f).items()):
2556 for k, v in sorted(ms.extras(f).items()):
2555 fm_extras.startitem()
2557 fm_extras.startitem()
2556 fm_extras.data(key=k)
2558 fm_extras.data(key=k)
2557 fm_extras.data(value=v)
2559 fm_extras.data(value=v)
2558 fm_extras.end()
2560 fm_extras.end()
2559
2561
2560 fm_files.end()
2562 fm_files.end()
2561
2563
2562 fm_extras = fm.nested(b'extras')
2564 fm_extras = fm.nested(b'extras')
2563 for f, d in sorted(ms.allextras().items()):
2565 for f, d in sorted(ms.allextras().items()):
2564 if f in ms:
2566 if f in ms:
2565 # If file is in mergestate, we have already processed its extras
2567 # If file is in mergestate, we have already processed its extras
2566 continue
2568 continue
2567 for k, v in d.items():
2569 for k, v in d.items():
2568 fm_extras.startitem()
2570 fm_extras.startitem()
2569 fm_extras.data(file=f)
2571 fm_extras.data(file=f)
2570 fm_extras.data(key=k)
2572 fm_extras.data(key=k)
2571 fm_extras.data(value=v)
2573 fm_extras.data(value=v)
2572 fm_extras.end()
2574 fm_extras.end()
2573
2575
2574 fm.end()
2576 fm.end()
2575
2577
2576
2578
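# Illustrative usage sketch: during a conflicted merge the default
# template above exposes keywords such as {commits}, {files}, {path} and
# {state}, so -T can be used to narrow the report.
#
#   $ hg debugmergestate
#   $ hg debugmergestate -T '{files % "{path}: {state}\n"}'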
2577 @command(b'debugnamecomplete', [], _(b'NAME...'))
2579 @command(b'debugnamecomplete', [], _(b'NAME...'))
2578 def debugnamecomplete(ui, repo, *args):
2580 def debugnamecomplete(ui, repo, *args):
2579 '''complete "names" - tags, open branch names, bookmark names'''
2581 '''complete "names" - tags, open branch names, bookmark names'''
2580
2582
2581 names = set()
2583 names = set()
2582 # since we previously only listed open branches, we will handle that
2584 # since we previously only listed open branches, we will handle that
2583 # specially (after this for loop)
2585 # specially (after this for loop)
2584 for name, ns in repo.names.items():
2586 for name, ns in repo.names.items():
2585 if name != b'branches':
2587 if name != b'branches':
2586 names.update(ns.listnames(repo))
2588 names.update(ns.listnames(repo))
2587 names.update(
2589 names.update(
2588 tag
2590 tag
2589 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2591 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2590 if not closed
2592 if not closed
2591 )
2593 )
2592 completions = set()
2594 completions = set()
2593 if not args:
2595 if not args:
2594 args = [b'']
2596 args = [b'']
2595 for a in args:
2597 for a in args:
2596 completions.update(n for n in names if n.startswith(a))
2598 completions.update(n for n in names if n.startswith(a))
2597 ui.write(b'\n'.join(sorted(completions)))
2599 ui.write(b'\n'.join(sorted(completions)))
2598 ui.write(b'\n')
2600 ui.write(b'\n')
2599
2601
2600
2602
2601 @command(
2603 @command(
2602 b'debugnodemap',
2604 b'debugnodemap',
2603 [
2605 (
2606 cmdutil.debugrevlogopts
2607 + [
2604 (
2608 (
2605 b'',
2609 b'',
2606 b'dump-new',
2610 b'dump-new',
2607 False,
2611 False,
2608 _(b'write a (new) persistent binary nodemap on stdout'),
2612 _(b'write a (new) persistent binary nodemap on stdout'),
2609 ),
2613 ),
2610 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2614 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2611 (
2615 (
2612 b'',
2616 b'',
2613 b'check',
2617 b'check',
2614 False,
2618 False,
2615 _(b'check that the data on disk data are correct.'),
2619 _(b'check that the data on disk data are correct.'),
2616 ),
2620 ),
2617 (
2621 (
2618 b'',
2622 b'',
2619 b'metadata',
2623 b'metadata',
2620 False,
2624 False,
2621 _(b'display the on disk meta data for the nodemap'),
2625 _(b'display the on disk meta data for the nodemap'),
2622 ),
2626 ),
2623 ],
2627 ]
2628 ),
2629 _(b'-c|-m|FILE REV'),
2624 )
2630 )
2625 def debugnodemap(ui, repo, **opts):
2631 def debugnodemap(ui, repo, file_=None, **opts):
2626 """write and inspect on disk nodemap"""
2632 """write and inspect on disk nodemap"""
2633 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
2634 if file_ is not None:
2635 raise error.CommandError(b'debugnodemap', _(b'invalid arguments'))
2636 elif file_ is None:
2637 opts['changelog'] = True
2638 r = cmdutil.openstorage(
2639 repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
2640 )
2641 if isinstance(r, manifest.manifestrevlog) or isinstance(r, filelog.filelog):
2642 r = r._revlog
2627 if opts['dump_new']:
2643 if opts['dump_new']:
2628 unfi = repo.unfiltered()
2629 cl = unfi.changelog
2630 if util.safehasattr(cl.index, "nodemap_data_all"):
2631 data = cl.index.nodemap_data_all()
2644 if util.safehasattr(r.index, "nodemap_data_all"):
2645 data = r.index.nodemap_data_all()
2632 else:
2646 else:
2633 data = nodemap.persistent_data(cl.index)
2647 data = nodemap.persistent_data(r.index)
2634 ui.write(data)
2648 ui.write(data)
2635 elif opts['dump_disk']:
2649 elif opts['dump_disk']:
2636 unfi = repo.unfiltered()
2637 cl = unfi.changelog
2638 nm_data = nodemap.persisted_data(cl)
2650 nm_data = nodemap.persisted_data(r)
2639 if nm_data is not None:
2651 if nm_data is not None:
2640 docket, data = nm_data
2652 docket, data = nm_data
2641 ui.write(data[:])
2653 ui.write(data[:])
2642 elif opts['check']:
2654 elif opts['check']:
2643 unfi = repo.unfiltered()
2644 cl = unfi.changelog
2645 nm_data = nodemap.persisted_data(cl)
2655 nm_data = nodemap.persisted_data(r)
2646 if nm_data is not None:
2656 if nm_data is not None:
2647 docket, data = nm_data
2657 docket, data = nm_data
2648 return nodemap.check_data(ui, cl.index, data)
2658 return nodemap.check_data(ui, r.index, data)
2649 elif opts['metadata']:
2659 elif opts['metadata']:
2650 unfi = repo.unfiltered()
2651 cl = unfi.changelog
2652 nm_data = nodemap.persisted_data(cl)
2660 nm_data = nodemap.persisted_data(r)
2653 if nm_data is not None:
2661 if nm_data is not None:
2654 docket, data = nm_data
2662 docket, data = nm_data
2655 ui.write((b"uid: %s\n") % docket.uid)
2663 ui.write((b"uid: %s\n") % docket.uid)
2656 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2664 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2657 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2665 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2658 ui.write((b"data-length: %d\n") % docket.data_length)
2666 ui.write((b"data-length: %d\n") % docket.data_length)
2659 ui.write((b"data-unused: %d\n") % docket.data_unused)
2667 ui.write((b"data-unused: %d\n") % docket.data_unused)
2660 unused_perc = docket.data_unused * 100.0 / docket.data_length
2668 unused_perc = docket.data_unused * 100.0 / docket.data_length
2661 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2669 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2662
2670
2663
2671
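# Illustrative usage sketch (the file path is hypothetical): with the
# revlog options added in this change, the command can inspect a nodemap
# other than the changelog's, which remains the default.
#
#   $ hg debugnodemap --metadata               # changelog nodemap
#   $ hg debugnodemap -m --metadata            # manifest nodemap
#   $ hg debugnodemap path/to/file.txt --check # filelog nodemap
#
# --dump-new and --dump-disk write raw binary nodemap data to stdout, so
# they are normally redirected to a file.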
2664 @command(
2672 @command(
2665 b'debugobsolete',
2673 b'debugobsolete',
2666 [
2674 [
2667 (b'', b'flags', 0, _(b'markers flag')),
2675 (b'', b'flags', 0, _(b'markers flag')),
2668 (
2676 (
2669 b'',
2677 b'',
2670 b'record-parents',
2678 b'record-parents',
2671 False,
2679 False,
2672 _(b'record parent information for the precursor'),
2680 _(b'record parent information for the precursor'),
2673 ),
2681 ),
2674 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2682 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2675 (
2683 (
2676 b'',
2684 b'',
2677 b'exclusive',
2685 b'exclusive',
2678 False,
2686 False,
2679 _(b'restrict display to markers only relevant to REV'),
2687 _(b'restrict display to markers only relevant to REV'),
2680 ),
2688 ),
2681 (b'', b'index', False, _(b'display index of the marker')),
2689 (b'', b'index', False, _(b'display index of the marker')),
2682 (b'', b'delete', [], _(b'delete markers specified by indices')),
2690 (b'', b'delete', [], _(b'delete markers specified by indices')),
2683 ]
2691 ]
2684 + cmdutil.commitopts2
2692 + cmdutil.commitopts2
2685 + cmdutil.formatteropts,
2693 + cmdutil.formatteropts,
2686 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2694 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2687 )
2695 )
2688 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2696 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2689 """create arbitrary obsolete marker
2697 """create arbitrary obsolete marker
2690
2698
2691 With no arguments, displays the list of obsolescence markers."""
2699 With no arguments, displays the list of obsolescence markers."""
2692
2700
2693 opts = pycompat.byteskwargs(opts)
2701 opts = pycompat.byteskwargs(opts)
2694
2702
2695 def parsenodeid(s):
2703 def parsenodeid(s):
2696 try:
2704 try:
2697 # We do not use revsingle/revrange functions here to accept
2705 # We do not use revsingle/revrange functions here to accept
2698 # arbitrary node identifiers, possibly not present in the
2706 # arbitrary node identifiers, possibly not present in the
2699 # local repository.
2707 # local repository.
2700 n = bin(s)
2708 n = bin(s)
2701 if len(n) != repo.nodeconstants.nodelen:
2709 if len(n) != repo.nodeconstants.nodelen:
2702 raise ValueError
2710 raise ValueError
2703 return n
2711 return n
2704 except ValueError:
2712 except ValueError:
2705 raise error.InputError(
2713 raise error.InputError(
2706 b'changeset references must be full hexadecimal '
2714 b'changeset references must be full hexadecimal '
2707 b'node identifiers'
2715 b'node identifiers'
2708 )
2716 )
2709
2717
2710 if opts.get(b'delete'):
2718 if opts.get(b'delete'):
2711 indices = []
2719 indices = []
2712 for v in opts.get(b'delete'):
2720 for v in opts.get(b'delete'):
2713 try:
2721 try:
2714 indices.append(int(v))
2722 indices.append(int(v))
2715 except ValueError:
2723 except ValueError:
2716 raise error.InputError(
2724 raise error.InputError(
2717 _(b'invalid index value: %r') % v,
2725 _(b'invalid index value: %r') % v,
2718 hint=_(b'use integers for indices'),
2726 hint=_(b'use integers for indices'),
2719 )
2727 )
2720
2728
2721 if repo.currenttransaction():
2729 if repo.currenttransaction():
2722 raise error.Abort(
2730 raise error.Abort(
2723 _(b'cannot delete obsmarkers in the middle of a transaction.')
2731 _(b'cannot delete obsmarkers in the middle of a transaction.')
2724 )
2732 )
2725
2733
2726 with repo.lock():
2734 with repo.lock():
2727 n = repair.deleteobsmarkers(repo.obsstore, indices)
2735 n = repair.deleteobsmarkers(repo.obsstore, indices)
2728 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2736 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2729
2737
2730 return
2738 return
2731
2739
2732 if precursor is not None:
2740 if precursor is not None:
2733 if opts[b'rev']:
2741 if opts[b'rev']:
2734 raise error.InputError(
2742 raise error.InputError(
2735 b'cannot select revision when creating marker'
2743 b'cannot select revision when creating marker'
2736 )
2744 )
2737 metadata = {}
2745 metadata = {}
2738 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2746 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2739 succs = tuple(parsenodeid(succ) for succ in successors)
2747 succs = tuple(parsenodeid(succ) for succ in successors)
2740 l = repo.lock()
2748 l = repo.lock()
2741 try:
2749 try:
2742 tr = repo.transaction(b'debugobsolete')
2750 tr = repo.transaction(b'debugobsolete')
2743 try:
2751 try:
2744 date = opts.get(b'date')
2752 date = opts.get(b'date')
2745 if date:
2753 if date:
2746 date = dateutil.parsedate(date)
2754 date = dateutil.parsedate(date)
2747 else:
2755 else:
2748 date = None
2756 date = None
2749 prec = parsenodeid(precursor)
2757 prec = parsenodeid(precursor)
2750 parents = None
2758 parents = None
2751 if opts[b'record_parents']:
2759 if opts[b'record_parents']:
2752 if prec not in repo.unfiltered():
2760 if prec not in repo.unfiltered():
2753 raise error.Abort(
2761 raise error.Abort(
2754 b'cannot use --record-parents on '
2762 b'cannot use --record-parents on '
2755 b'unknown changesets'
2763 b'unknown changesets'
2756 )
2764 )
2757 parents = repo.unfiltered()[prec].parents()
2765 parents = repo.unfiltered()[prec].parents()
2758 parents = tuple(p.node() for p in parents)
2766 parents = tuple(p.node() for p in parents)
2759 repo.obsstore.create(
2767 repo.obsstore.create(
2760 tr,
2768 tr,
2761 prec,
2769 prec,
2762 succs,
2770 succs,
2763 opts[b'flags'],
2771 opts[b'flags'],
2764 parents=parents,
2772 parents=parents,
2765 date=date,
2773 date=date,
2766 metadata=metadata,
2774 metadata=metadata,
2767 ui=ui,
2775 ui=ui,
2768 )
2776 )
2769 tr.close()
2777 tr.close()
2770 except ValueError as exc:
2778 except ValueError as exc:
2771 raise error.Abort(
2779 raise error.Abort(
2772 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2780 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2773 )
2781 )
2774 finally:
2782 finally:
2775 tr.release()
2783 tr.release()
2776 finally:
2784 finally:
2777 l.release()
2785 l.release()
2778 else:
2786 else:
2779 if opts[b'rev']:
2787 if opts[b'rev']:
2780 revs = logcmdutil.revrange(repo, opts[b'rev'])
2788 revs = logcmdutil.revrange(repo, opts[b'rev'])
2781 nodes = [repo[r].node() for r in revs]
2789 nodes = [repo[r].node() for r in revs]
2782 markers = list(
2790 markers = list(
2783 obsutil.getmarkers(
2791 obsutil.getmarkers(
2784 repo, nodes=nodes, exclusive=opts[b'exclusive']
2792 repo, nodes=nodes, exclusive=opts[b'exclusive']
2785 )
2793 )
2786 )
2794 )
2787 markers.sort(key=lambda x: x._data)
2795 markers.sort(key=lambda x: x._data)
2788 else:
2796 else:
2789 markers = obsutil.getmarkers(repo)
2797 markers = obsutil.getmarkers(repo)
2790
2798
2791 markerstoiter = markers
2799 markerstoiter = markers
2792 isrelevant = lambda m: True
2800 isrelevant = lambda m: True
2793 if opts.get(b'rev') and opts.get(b'index'):
2801 if opts.get(b'rev') and opts.get(b'index'):
2794 markerstoiter = obsutil.getmarkers(repo)
2802 markerstoiter = obsutil.getmarkers(repo)
2795 markerset = set(markers)
2803 markerset = set(markers)
2796 isrelevant = lambda m: m in markerset
2804 isrelevant = lambda m: m in markerset
2797
2805
2798 fm = ui.formatter(b'debugobsolete', opts)
2806 fm = ui.formatter(b'debugobsolete', opts)
2799 for i, m in enumerate(markerstoiter):
2807 for i, m in enumerate(markerstoiter):
2800 if not isrelevant(m):
2808 if not isrelevant(m):
2801 # marker can be irrelevant when we're iterating over a set
2809 # marker can be irrelevant when we're iterating over a set
2802 # of markers (markerstoiter) which is bigger than the set
2810 # of markers (markerstoiter) which is bigger than the set
2803 # of markers we want to display (markers)
2811 # of markers we want to display (markers)
2804 # this can happen if both --index and --rev options are
2812 # this can happen if both --index and --rev options are
2805 # provided and thus we need to iterate over all of the markers
2813 # provided and thus we need to iterate over all of the markers
2806 # to get the correct indices, but only display the ones that
2814 # to get the correct indices, but only display the ones that
2807 # are relevant to --rev value
2815 # are relevant to --rev value
2808 continue
2816 continue
2809 fm.startitem()
2817 fm.startitem()
2810 ind = i if opts.get(b'index') else None
2818 ind = i if opts.get(b'index') else None
2811 cmdutil.showmarker(fm, m, index=ind)
2819 cmdutil.showmarker(fm, m, index=ind)
2812 fm.end()
2820 fm.end()
2813
2821
2814
2822
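# Illustrative usage sketch (node ids are placeholders): list existing
# obsolescence markers, or record that one changeset superseded another;
# per parsenodeid() above, only full-length hex node ids are accepted.
#
#   $ hg debugobsolete                  # list all markers
#   $ hg debugobsolete --rev . --index  # indexed markers relevant to '.'
#   $ hg debugobsolete <precursor-node> <successor-node>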
2815 @command(
2823 @command(
2816 b'debugp1copies',
2824 b'debugp1copies',
2817 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2825 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2818 _(b'[-r REV]'),
2826 _(b'[-r REV]'),
2819 )
2827 )
2820 def debugp1copies(ui, repo, **opts):
2828 def debugp1copies(ui, repo, **opts):
2821 """dump copy information compared to p1"""
2829 """dump copy information compared to p1"""
2822
2830
2823 opts = pycompat.byteskwargs(opts)
2831 opts = pycompat.byteskwargs(opts)
2824 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2832 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2825 for dst, src in ctx.p1copies().items():
2833 for dst, src in ctx.p1copies().items():
2826 ui.write(b'%s -> %s\n' % (src, dst))
2834 ui.write(b'%s -> %s\n' % (src, dst))
2827
2835
2828
2836
2829 @command(
2837 @command(
2830 b'debugp2copies',
2838 b'debugp2copies',
2831 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2839 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2832 _(b'[-r REV]'),
2840 _(b'[-r REV]'),
2833 )
2841 )
2834 def debugp2copies(ui, repo, **opts):
2842 def debugp2copies(ui, repo, **opts):
2835 """dump copy information compared to p2"""
2843 """dump copy information compared to p2"""
2836
2844
2837 opts = pycompat.byteskwargs(opts)
2845 opts = pycompat.byteskwargs(opts)
2838 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2846 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2839 for dst, src in ctx.p2copies().items():
2847 for dst, src in ctx.p2copies().items():
2840 ui.write(b'%s -> %s\n' % (src, dst))
2848 ui.write(b'%s -> %s\n' % (src, dst))
2841
2849
2842
2850
2843 @command(
2851 @command(
2844 b'debugpathcomplete',
2852 b'debugpathcomplete',
2845 [
2853 [
2846 (b'f', b'full', None, _(b'complete an entire path')),
2854 (b'f', b'full', None, _(b'complete an entire path')),
2847 (b'n', b'normal', None, _(b'show only normal files')),
2855 (b'n', b'normal', None, _(b'show only normal files')),
2848 (b'a', b'added', None, _(b'show only added files')),
2856 (b'a', b'added', None, _(b'show only added files')),
2849 (b'r', b'removed', None, _(b'show only removed files')),
2857 (b'r', b'removed', None, _(b'show only removed files')),
2850 ],
2858 ],
2851 _(b'FILESPEC...'),
2859 _(b'FILESPEC...'),
2852 )
2860 )
2853 def debugpathcomplete(ui, repo, *specs, **opts):
2861 def debugpathcomplete(ui, repo, *specs, **opts):
2854 """complete part or all of a tracked path
2862 """complete part or all of a tracked path
2855
2863
2856 This command supports shells that offer path name completion. It
2864 This command supports shells that offer path name completion. It
2857 currently completes only files already known to the dirstate.
2865 currently completes only files already known to the dirstate.
2858
2866
2859 Completion extends only to the next path segment unless
2867 Completion extends only to the next path segment unless
2860 --full is specified, in which case entire paths are used."""
2868 --full is specified, in which case entire paths are used."""
2861
2869
2862 def complete(path, acceptable):
2870 def complete(path, acceptable):
2863 dirstate = repo.dirstate
2871 dirstate = repo.dirstate
2864 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2872 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2865 rootdir = repo.root + pycompat.ossep
2873 rootdir = repo.root + pycompat.ossep
2866 if spec != repo.root and not spec.startswith(rootdir):
2874 if spec != repo.root and not spec.startswith(rootdir):
2867 return [], []
2875 return [], []
2868 if os.path.isdir(spec):
2876 if os.path.isdir(spec):
2869 spec += b'/'
2877 spec += b'/'
2870 spec = spec[len(rootdir) :]
2878 spec = spec[len(rootdir) :]
2871 fixpaths = pycompat.ossep != b'/'
2879 fixpaths = pycompat.ossep != b'/'
2872 if fixpaths:
2880 if fixpaths:
2873 spec = spec.replace(pycompat.ossep, b'/')
2881 spec = spec.replace(pycompat.ossep, b'/')
2874 speclen = len(spec)
2882 speclen = len(spec)
2875 fullpaths = opts['full']
2883 fullpaths = opts['full']
2876 files, dirs = set(), set()
2884 files, dirs = set(), set()
2877 adddir, addfile = dirs.add, files.add
2885 adddir, addfile = dirs.add, files.add
2878 for f, st in dirstate.items():
2886 for f, st in dirstate.items():
2879 if f.startswith(spec) and st.state in acceptable:
2887 if f.startswith(spec) and st.state in acceptable:
2880 if fixpaths:
2888 if fixpaths:
2881 f = f.replace(b'/', pycompat.ossep)
2889 f = f.replace(b'/', pycompat.ossep)
2882 if fullpaths:
2890 if fullpaths:
2883 addfile(f)
2891 addfile(f)
2884 continue
2892 continue
2885 s = f.find(pycompat.ossep, speclen)
2893 s = f.find(pycompat.ossep, speclen)
2886 if s >= 0:
2894 if s >= 0:
2887 adddir(f[:s])
2895 adddir(f[:s])
2888 else:
2896 else:
2889 addfile(f)
2897 addfile(f)
2890 return files, dirs
2898 return files, dirs
2891
2899
2892 acceptable = b''
2900 acceptable = b''
2893 if opts['normal']:
2901 if opts['normal']:
2894 acceptable += b'nm'
2902 acceptable += b'nm'
2895 if opts['added']:
2903 if opts['added']:
2896 acceptable += b'a'
2904 acceptable += b'a'
2897 if opts['removed']:
2905 if opts['removed']:
2898 acceptable += b'r'
2906 acceptable += b'r'
2899 cwd = repo.getcwd()
2907 cwd = repo.getcwd()
2900 if not specs:
2908 if not specs:
2901 specs = [b'.']
2909 specs = [b'.']
2902
2910
2903 files, dirs = set(), set()
2911 files, dirs = set(), set()
2904 for spec in specs:
2912 for spec in specs:
2905 f, d = complete(spec, acceptable or b'nmar')
2913 f, d = complete(spec, acceptable or b'nmar')
2906 files.update(f)
2914 files.update(f)
2907 dirs.update(d)
2915 dirs.update(d)
2908 files.update(dirs)
2916 files.update(dirs)
2909 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2917 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2910 ui.write(b'\n')
2918 ui.write(b'\n')
2911
2919
2912
2920
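# Editor's sketch: a minimal, self-contained illustration of the segment-wise
# completion that debugpathcomplete performs above. Unless full paths are
# requested, a match is truncated at the next path separator so shells can
# complete one segment at a time. The `tracked` mapping is a made-up stand-in
# for the dirstate; it is not Mercurial's API.
def complete_segment(tracked, spec, full=False):
    files, dirs = set(), set()
    for path in tracked:
        if not path.startswith(spec):
            continue
        if full:
            files.add(path)
            continue
        sep = path.find('/', len(spec))
        if sep >= 0:
            dirs.add(path[:sep])   # stop at the next path segment
        else:
            files.add(path)
    return sorted(files | dirs)

# Example: completing "src/" yields only the next segment.
tracked = {'src/main.py', 'src/util/helpers.py', 'README'}
assert complete_segment(tracked, 'src/') == ['src/main.py', 'src/util']
assert complete_segment(tracked, 'src/', full=True) == [
    'src/main.py', 'src/util/helpers.py']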
2913 @command(
2921 @command(
2914 b'debugpathcopies',
2922 b'debugpathcopies',
2915 cmdutil.walkopts,
2923 cmdutil.walkopts,
2916 b'hg debugpathcopies REV1 REV2 [FILE]',
2924 b'hg debugpathcopies REV1 REV2 [FILE]',
2917 inferrepo=True,
2925 inferrepo=True,
2918 )
2926 )
2919 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2927 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2920 """show copies between two revisions"""
2928 """show copies between two revisions"""
2921 ctx1 = scmutil.revsingle(repo, rev1)
2929 ctx1 = scmutil.revsingle(repo, rev1)
2922 ctx2 = scmutil.revsingle(repo, rev2)
2930 ctx2 = scmutil.revsingle(repo, rev2)
2923 m = scmutil.match(ctx1, pats, opts)
2931 m = scmutil.match(ctx1, pats, opts)
2924 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2932 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2925 ui.write(b'%s -> %s\n' % (src, dst))
2933 ui.write(b'%s -> %s\n' % (src, dst))
2926
2934
2927
2935
2928 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2936 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2929 def debugpeer(ui, path):
2937 def debugpeer(ui, path):
2930 """establish a connection to a peer repository"""
2938 """establish a connection to a peer repository"""
2931 # Always enable peer request logging. Requires --debug to display
2939 # Always enable peer request logging. Requires --debug to display
2932 # though.
2940 # though.
2933 overrides = {
2941 overrides = {
2934 (b'devel', b'debug.peer-request'): True,
2942 (b'devel', b'debug.peer-request'): True,
2935 }
2943 }
2936
2944
2937 with ui.configoverride(overrides):
2945 with ui.configoverride(overrides):
2938 peer = hg.peer(ui, {}, path)
2946 peer = hg.peer(ui, {}, path)
2939
2947
2940 try:
2948 try:
2941 local = peer.local() is not None
2949 local = peer.local() is not None
2942 canpush = peer.canpush()
2950 canpush = peer.canpush()
2943
2951
2944 ui.write(_(b'url: %s\n') % peer.url())
2952 ui.write(_(b'url: %s\n') % peer.url())
2945 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2953 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2946 ui.write(
2954 ui.write(
2947 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2955 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2948 )
2956 )
2949 finally:
2957 finally:
2950 peer.close()
2958 peer.close()
2951
2959
2952
2960
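# Editor's sketch: debugpeer above uses ui.configoverride() to flip a config
# knob only for the duration of a block. The helper below is a generic,
# non-Mercurial illustration of that scoped-override pattern over a plain
# dict; the names here are made up for the example.
import contextlib

@contextlib.contextmanager
def configoverride(config, overrides):
    missing = object()
    saved = {k: config.get(k, missing) for k in overrides}
    config.update(overrides)
    try:
        yield config
    finally:
        for k, old in saved.items():
            if old is missing:
                config.pop(k, None)
            else:
                config[k] = old

cfg = {'devel.debug.peer-request': False}
with configoverride(cfg, {'devel.debug.peer-request': True}):
    assert cfg['devel.debug.peer-request'] is True
assert cfg['devel.debug.peer-request'] is False   # restored on exit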
2953 @command(
2961 @command(
2954 b'debugpickmergetool',
2962 b'debugpickmergetool',
2955 [
2963 [
2956 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2964 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2957 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2965 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2958 ]
2966 ]
2959 + cmdutil.walkopts
2967 + cmdutil.walkopts
2960 + cmdutil.mergetoolopts,
2968 + cmdutil.mergetoolopts,
2961 _(b'[PATTERN]...'),
2969 _(b'[PATTERN]...'),
2962 inferrepo=True,
2970 inferrepo=True,
2963 )
2971 )
2964 def debugpickmergetool(ui, repo, *pats, **opts):
2972 def debugpickmergetool(ui, repo, *pats, **opts):
2965 """examine which merge tool is chosen for the specified file
2973 """examine which merge tool is chosen for the specified file
2966
2974
2967 As described in :hg:`help merge-tools`, Mercurial examines
2975 As described in :hg:`help merge-tools`, Mercurial examines
2968 configurations below in this order to decide which merge tool is
2976 configurations below in this order to decide which merge tool is
2969 chosen for the specified file.
2977 chosen for the specified file.
2970
2978
2971 1. ``--tool`` option
2979 1. ``--tool`` option
2972 2. ``HGMERGE`` environment variable
2980 2. ``HGMERGE`` environment variable
2973 3. configurations in ``merge-patterns`` section
2981 3. configurations in ``merge-patterns`` section
2974 4. configuration of ``ui.merge``
2982 4. configuration of ``ui.merge``
2975 5. configurations in ``merge-tools`` section
2983 5. configurations in ``merge-tools`` section
2976 6. ``hgmerge`` tool (for historical reasons only)
2984 6. ``hgmerge`` tool (for historical reasons only)
2977 7. default tool for fallback (``:merge`` or ``:prompt``)
2985 7. default tool for fallback (``:merge`` or ``:prompt``)
2978
2986
2979 This command writes out the examination result in the style below::
2987 This command writes out the examination result in the style below::
2980
2988
2981 FILE = MERGETOOL
2989 FILE = MERGETOOL
2982
2990
2983 By default, all files known in the first parent context of the
2991 By default, all files known in the first parent context of the
2984 working directory are examined. Use file patterns and/or -I/-X
2992 working directory are examined. Use file patterns and/or -I/-X
2985 options to limit target files. -r/--rev is also useful to examine
2993 options to limit target files. -r/--rev is also useful to examine
2986 files in another context without actually updating to it.
2994 files in another context without actually updating to it.
2987
2995
2988 With --debug, this command shows warning messages while matching
2996 With --debug, this command shows warning messages while matching
2989 against ``merge-patterns`` and so on, too. It is recommended to
2997 against ``merge-patterns`` and so on, too. It is recommended to
2990 use this option with explicit file patterns and/or -I/-X options,
2998 use this option with explicit file patterns and/or -I/-X options,
2991 because this option increases the amount of output per file according
2999 because this option increases the amount of output per file according
2992 to configurations in hgrc.
3000 to configurations in hgrc.
2993
3001
2994 With -v/--verbose, this command first shows the configurations below
3002 With -v/--verbose, this command first shows the configurations below
2995 (only if they are specified).
3003 (only if they are specified).
2996
3004
2997 - ``--tool`` option
3005 - ``--tool`` option
2998 - ``HGMERGE`` environment variable
3006 - ``HGMERGE`` environment variable
2999 - configuration of ``ui.merge``
3007 - configuration of ``ui.merge``
3000
3008
3001 If a merge tool is chosen before matching against
3009 If a merge tool is chosen before matching against
3002 ``merge-patterns``, this command can't show any helpful
3010 ``merge-patterns``, this command can't show any helpful
3003 information, even with --debug. In such a case, the information above
3011 information, even with --debug. In such a case, the information above
3004 is useful for knowing why a merge tool was chosen.
3012 is useful for knowing why a merge tool was chosen.
3005 """
3013 """
3006 opts = pycompat.byteskwargs(opts)
3014 opts = pycompat.byteskwargs(opts)
3007 overrides = {}
3015 overrides = {}
3008 if opts[b'tool']:
3016 if opts[b'tool']:
3009 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
3017 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
3010 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
3018 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
3011
3019
3012 with ui.configoverride(overrides, b'debugmergepatterns'):
3020 with ui.configoverride(overrides, b'debugmergepatterns'):
3013 hgmerge = encoding.environ.get(b"HGMERGE")
3021 hgmerge = encoding.environ.get(b"HGMERGE")
3014 if hgmerge is not None:
3022 if hgmerge is not None:
3015 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
3023 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
3016 uimerge = ui.config(b"ui", b"merge")
3024 uimerge = ui.config(b"ui", b"merge")
3017 if uimerge:
3025 if uimerge:
3018 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
3026 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
3019
3027
3020 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3028 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3021 m = scmutil.match(ctx, pats, opts)
3029 m = scmutil.match(ctx, pats, opts)
3022 changedelete = opts[b'changedelete']
3030 changedelete = opts[b'changedelete']
3023 for path in ctx.walk(m):
3031 for path in ctx.walk(m):
3024 fctx = ctx[path]
3032 fctx = ctx[path]
3025 with ui.silent(
3033 with ui.silent(
3026 error=True
3034 error=True
3027 ) if not ui.debugflag else util.nullcontextmanager():
3035 ) if not ui.debugflag else util.nullcontextmanager():
3028 tool, toolpath = filemerge._picktool(
3036 tool, toolpath = filemerge._picktool(
3029 repo,
3037 repo,
3030 ui,
3038 ui,
3031 path,
3039 path,
3032 fctx.isbinary(),
3040 fctx.isbinary(),
3033 b'l' in fctx.flags(),
3041 b'l' in fctx.flags(),
3034 changedelete,
3042 changedelete,
3035 )
3043 )
3036 ui.write(b'%s = %s\n' % (path, tool))
3044 ui.write(b'%s = %s\n' % (path, tool))
3037
3045
3038
3046
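# Editor's sketch of the "first configured source wins" precedence listed in
# the debugpickmergetool docstring above (--tool, then HGMERGE, then the
# merge-patterns / ui.merge / merge-tools configuration, with ``:prompt`` as
# a final fallback). This is a simplified illustration, not Mercurial's
# filemerge._picktool(); the sources below are made-up callables.
def pick_first_configured(sources, default=':prompt'):
    """Return (origin, tool) for the first source that yields a tool."""
    for origin, lookup in sources:
        tool = lookup()
        if tool:
            return origin, tool
    return 'fallback', default

sources = [
    ('--tool option', lambda: None),    # not passed on the command line
    ('HGMERGE', lambda: None),          # environment variable unset
    ('ui.merge', lambda: 'vimdiff'),    # configured in hgrc
    ('merge-tools', lambda: 'kdiff3'),  # never consulted: ui.merge already won
]
assert pick_first_configured(sources) == ('ui.merge', 'vimdiff')
assert pick_first_configured([('--tool option', lambda: None)]) == ('fallback', ':prompt')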
3039 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3047 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3040 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3048 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3041 """access the pushkey key/value protocol
3049 """access the pushkey key/value protocol
3042
3050
3043 With two args, list the keys in the given namespace.
3051 With two args, list the keys in the given namespace.
3044
3052
3045 With five args, set a key to new if it currently is set to old.
3053 With five args, set a key to new if it currently is set to old.
3046 Reports success or failure.
3054 Reports success or failure.
3047 """
3055 """
3048
3056
3049 target = hg.peer(ui, {}, repopath)
3057 target = hg.peer(ui, {}, repopath)
3050 try:
3058 try:
3051 if keyinfo:
3059 if keyinfo:
3052 key, old, new = keyinfo
3060 key, old, new = keyinfo
3053 with target.commandexecutor() as e:
3061 with target.commandexecutor() as e:
3054 r = e.callcommand(
3062 r = e.callcommand(
3055 b'pushkey',
3063 b'pushkey',
3056 {
3064 {
3057 b'namespace': namespace,
3065 b'namespace': namespace,
3058 b'key': key,
3066 b'key': key,
3059 b'old': old,
3067 b'old': old,
3060 b'new': new,
3068 b'new': new,
3061 },
3069 },
3062 ).result()
3070 ).result()
3063
3071
3064 ui.status(pycompat.bytestr(r) + b'\n')
3072 ui.status(pycompat.bytestr(r) + b'\n')
3065 return not r
3073 return not r
3066 else:
3074 else:
3067 for k, v in sorted(target.listkeys(namespace).items()):
3075 for k, v in sorted(target.listkeys(namespace).items()):
3068 ui.write(
3076 ui.write(
3069 b"%s\t%s\n"
3077 b"%s\t%s\n"
3070 % (stringutil.escapestr(k), stringutil.escapestr(v))
3078 % (stringutil.escapestr(k), stringutil.escapestr(v))
3071 )
3079 )
3072 finally:
3080 finally:
3073 target.close()
3081 target.close()
3074
3082
3075
3083
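# Editor's sketch of the pushkey update rule described above ("set a key to
# new if it currently is set to old"): a compare-and-swap over a namespace
# mapping. This illustrates the protocol's semantics only, not the wire
# implementation; missing keys are treated as the empty value in this demo.
def pushkey(namespace, key, old, new):
    """Succeed only if the key's current value equals `old`."""
    if namespace.get(key, '') != old:
        return False            # someone else changed it first; report failure
    namespace[key] = new
    return True

bookmarks = {'stable': 'abc123'}
assert pushkey(bookmarks, 'stable', 'abc123', 'def456') is True
assert pushkey(bookmarks, 'stable', 'abc123', 'fff000') is False  # stale old value
assert bookmarks['stable'] == 'def456'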
3076 @command(b'debugpvec', [], _(b'A B'))
3084 @command(b'debugpvec', [], _(b'A B'))
3077 def debugpvec(ui, repo, a, b=None):
3085 def debugpvec(ui, repo, a, b=None):
3078 ca = scmutil.revsingle(repo, a)
3086 ca = scmutil.revsingle(repo, a)
3079 cb = scmutil.revsingle(repo, b)
3087 cb = scmutil.revsingle(repo, b)
3080 pa = pvec.ctxpvec(ca)
3088 pa = pvec.ctxpvec(ca)
3081 pb = pvec.ctxpvec(cb)
3089 pb = pvec.ctxpvec(cb)
3082 if pa == pb:
3090 if pa == pb:
3083 rel = b"="
3091 rel = b"="
3084 elif pa > pb:
3092 elif pa > pb:
3085 rel = b">"
3093 rel = b">"
3086 elif pa < pb:
3094 elif pa < pb:
3087 rel = b"<"
3095 rel = b"<"
3088 elif pa | pb:
3096 elif pa | pb:
3089 rel = b"|"
3097 rel = b"|"
3090 ui.write(_(b"a: %s\n") % pa)
3098 ui.write(_(b"a: %s\n") % pa)
3091 ui.write(_(b"b: %s\n") % pb)
3099 ui.write(_(b"b: %s\n") % pb)
3092 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3100 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3093 ui.write(
3101 ui.write(
3094 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3102 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3095 % (
3103 % (
3096 abs(pa._depth - pb._depth),
3104 abs(pa._depth - pb._depth),
3097 pvec._hamming(pa._vec, pb._vec),
3105 pvec._hamming(pa._vec, pb._vec),
3098 pa.distance(pb),
3106 pa.distance(pb),
3099 rel,
3107 rel,
3100 )
3108 )
3101 )
3109 )
3102
3110
3103
3111
3104 @command(
3112 @command(
3105 b'debugrebuilddirstate|debugrebuildstate',
3113 b'debugrebuilddirstate|debugrebuildstate',
3106 [
3114 [
3107 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3115 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3108 (
3116 (
3109 b'',
3117 b'',
3110 b'minimal',
3118 b'minimal',
3111 None,
3119 None,
3112 _(
3120 _(
3113 b'only rebuild files that are inconsistent with '
3121 b'only rebuild files that are inconsistent with '
3114 b'the working copy parent'
3122 b'the working copy parent'
3115 ),
3123 ),
3116 ),
3124 ),
3117 ],
3125 ],
3118 _(b'[-r REV]'),
3126 _(b'[-r REV]'),
3119 )
3127 )
3120 def debugrebuilddirstate(ui, repo, rev, **opts):
3128 def debugrebuilddirstate(ui, repo, rev, **opts):
3121 """rebuild the dirstate as it would look for the given revision
3129 """rebuild the dirstate as it would look for the given revision
3122
3130
3123 If no revision is specified, the first parent of the working directory is used.
3131 If no revision is specified, the first parent of the working directory is used.
3124
3132
3125 The dirstate will be set to the files of the given revision.
3133 The dirstate will be set to the files of the given revision.
3126 The actual working directory content or existing dirstate
3134 The actual working directory content or existing dirstate
3127 information such as adds or removes is not considered.
3135 information such as adds or removes is not considered.
3128
3136
3129 ``minimal`` will only rebuild the dirstate status for files that claim to be
3137 ``minimal`` will only rebuild the dirstate status for files that claim to be
3130 tracked but are not in the parent manifest, or that exist in the parent
3138 tracked but are not in the parent manifest, or that exist in the parent
3131 manifest but are not in the dirstate. It will not change adds, removes, or
3139 manifest but are not in the dirstate. It will not change adds, removes, or
3132 modified files that are in the working copy parent.
3140 modified files that are in the working copy parent.
3133
3141
3134 One use of this command is to make the next :hg:`status` invocation
3142 One use of this command is to make the next :hg:`status` invocation
3135 check the actual file content.
3143 check the actual file content.
3136 """
3144 """
3137 ctx = scmutil.revsingle(repo, rev)
3145 ctx = scmutil.revsingle(repo, rev)
3138 with repo.wlock():
3146 with repo.wlock():
3139 if repo.currenttransaction() is not None:
3147 if repo.currenttransaction() is not None:
3140 msg = b'rebuild the dirstate outside of a transaction'
3148 msg = b'rebuild the dirstate outside of a transaction'
3141 raise error.ProgrammingError(msg)
3149 raise error.ProgrammingError(msg)
3142 dirstate = repo.dirstate
3150 dirstate = repo.dirstate
3143 changedfiles = None
3151 changedfiles = None
3144 # See command doc for what minimal does.
3152 # See command doc for what minimal does.
3145 if opts.get('minimal'):
3153 if opts.get('minimal'):
3146 manifestfiles = set(ctx.manifest().keys())
3154 manifestfiles = set(ctx.manifest().keys())
3147 dirstatefiles = set(dirstate)
3155 dirstatefiles = set(dirstate)
3148 manifestonly = manifestfiles - dirstatefiles
3156 manifestonly = manifestfiles - dirstatefiles
3149 dsonly = dirstatefiles - manifestfiles
3157 dsonly = dirstatefiles - manifestfiles
3150 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3158 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3151 changedfiles = manifestonly | dsnotadded
3159 changedfiles = manifestonly | dsnotadded
3152
3160
3153 with dirstate.changing_parents(repo):
3161 with dirstate.changing_parents(repo):
3154 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3162 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3155
3163
3156
3164
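# Editor's illustration of what --minimal computes above: only paths that are
# in the parent manifest but missing from the dirstate, plus dirstate entries
# that are neither in the manifest nor marked as added. Toy data, plain sets.
manifestfiles = {'a.txt', 'b.txt', 'c.txt'}
dirstatefiles = {'b.txt', 'c.txt', 'd.txt', 'new.txt'}
added = {'new.txt'}                       # stand-in for the dirstate "added" state

manifestonly = manifestfiles - dirstatefiles          # {'a.txt'}
dsnotadded = {f for f in dirstatefiles - manifestfiles if f not in added}
changedfiles = manifestonly | dsnotadded

assert changedfiles == {'a.txt', 'd.txt'}   # only these entries get rebuilt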
3157 @command(
3165 @command(
3158 b'debugrebuildfncache',
3166 b'debugrebuildfncache',
3159 [
3167 [
3160 (
3168 (
3161 b'',
3169 b'',
3162 b'only-data',
3170 b'only-data',
3163 False,
3171 False,
3164 _(b'only look for wrong .d files (much faster)'),
3172 _(b'only look for wrong .d files (much faster)'),
3165 )
3173 )
3166 ],
3174 ],
3167 b'',
3175 b'',
3168 )
3176 )
3169 def debugrebuildfncache(ui, repo, **opts):
3177 def debugrebuildfncache(ui, repo, **opts):
3170 """rebuild the fncache file"""
3178 """rebuild the fncache file"""
3171 opts = pycompat.byteskwargs(opts)
3179 opts = pycompat.byteskwargs(opts)
3172 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3180 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3173
3181
3174
3182
3175 @command(
3183 @command(
3176 b'debugrename',
3184 b'debugrename',
3177 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3185 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3178 _(b'[-r REV] [FILE]...'),
3186 _(b'[-r REV] [FILE]...'),
3179 )
3187 )
3180 def debugrename(ui, repo, *pats, **opts):
3188 def debugrename(ui, repo, *pats, **opts):
3181 """dump rename information"""
3189 """dump rename information"""
3182
3190
3183 opts = pycompat.byteskwargs(opts)
3191 opts = pycompat.byteskwargs(opts)
3184 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3192 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3185 m = scmutil.match(ctx, pats, opts)
3193 m = scmutil.match(ctx, pats, opts)
3186 for abs in ctx.walk(m):
3194 for abs in ctx.walk(m):
3187 fctx = ctx[abs]
3195 fctx = ctx[abs]
3188 o = fctx.filelog().renamed(fctx.filenode())
3196 o = fctx.filelog().renamed(fctx.filenode())
3189 rel = repo.pathto(abs)
3197 rel = repo.pathto(abs)
3190 if o:
3198 if o:
3191 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3199 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3192 else:
3200 else:
3193 ui.write(_(b"%s not renamed\n") % rel)
3201 ui.write(_(b"%s not renamed\n") % rel)
3194
3202
3195
3203
3196 @command(b'debugrequires|debugrequirements', [], b'')
3204 @command(b'debugrequires|debugrequirements', [], b'')
3197 def debugrequirements(ui, repo):
3205 def debugrequirements(ui, repo):
3198 """print the current repo requirements"""
3206 """print the current repo requirements"""
3199 for r in sorted(repo.requirements):
3207 for r in sorted(repo.requirements):
3200 ui.write(b"%s\n" % r)
3208 ui.write(b"%s\n" % r)
3201
3209
3202
3210
3203 @command(
3211 @command(
3204 b'debugrevlog',
3212 b'debugrevlog',
3205 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3213 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3206 _(b'-c|-m|FILE'),
3214 _(b'-c|-m|FILE'),
3207 optionalrepo=True,
3215 optionalrepo=True,
3208 )
3216 )
3209 def debugrevlog(ui, repo, file_=None, **opts):
3217 def debugrevlog(ui, repo, file_=None, **opts):
3210 """show data and statistics about a revlog"""
3218 """show data and statistics about a revlog"""
3211 opts = pycompat.byteskwargs(opts)
3219 opts = pycompat.byteskwargs(opts)
3212 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3220 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3213
3221
3214 if opts.get(b"dump"):
3222 if opts.get(b"dump"):
3215 revlog_debug.dump(ui, r)
3223 revlog_debug.dump(ui, r)
3216 else:
3224 else:
3217 revlog_debug.debug_revlog(ui, r)
3225 revlog_debug.debug_revlog(ui, r)
3218 return 0
3226 return 0
3219
3227
3220
3228
3221 @command(
3229 @command(
3222 b'debugrevlogindex',
3230 b'debugrevlogindex',
3223 cmdutil.debugrevlogopts
3231 cmdutil.debugrevlogopts
3224 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3232 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3225 _(b'[-f FORMAT] -c|-m|FILE'),
3233 _(b'[-f FORMAT] -c|-m|FILE'),
3226 optionalrepo=True,
3234 optionalrepo=True,
3227 )
3235 )
3228 def debugrevlogindex(ui, repo, file_=None, **opts):
3236 def debugrevlogindex(ui, repo, file_=None, **opts):
3229 """dump the contents of a revlog index"""
3237 """dump the contents of a revlog index"""
3230 opts = pycompat.byteskwargs(opts)
3238 opts = pycompat.byteskwargs(opts)
3231 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3239 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3232 format = opts.get(b'format', 0)
3240 format = opts.get(b'format', 0)
3233 if format not in (0, 1):
3241 if format not in (0, 1):
3234 raise error.Abort(_(b"unknown format %d") % format)
3242 raise error.Abort(_(b"unknown format %d") % format)
3235
3243
3236 if ui.debugflag:
3244 if ui.debugflag:
3237 shortfn = hex
3245 shortfn = hex
3238 else:
3246 else:
3239 shortfn = short
3247 shortfn = short
3240
3248
3241 # There might not be anything in r, so have a sane default
3249 # There might not be anything in r, so have a sane default
3242 idlen = 12
3250 idlen = 12
3243 for i in r:
3251 for i in r:
3244 idlen = len(shortfn(r.node(i)))
3252 idlen = len(shortfn(r.node(i)))
3245 break
3253 break
3246
3254
3247 if format == 0:
3255 if format == 0:
3248 if ui.verbose:
3256 if ui.verbose:
3249 ui.writenoi18n(
3257 ui.writenoi18n(
3250 b" rev offset length linkrev %s %s p2\n"
3258 b" rev offset length linkrev %s %s p2\n"
3251 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3259 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3252 )
3260 )
3253 else:
3261 else:
3254 ui.writenoi18n(
3262 ui.writenoi18n(
3255 b" rev linkrev %s %s p2\n"
3263 b" rev linkrev %s %s p2\n"
3256 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3264 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3257 )
3265 )
3258 elif format == 1:
3266 elif format == 1:
3259 if ui.verbose:
3267 if ui.verbose:
3260 ui.writenoi18n(
3268 ui.writenoi18n(
3261 (
3269 (
3262 b" rev flag offset length size link p1"
3270 b" rev flag offset length size link p1"
3263 b" p2 %s\n"
3271 b" p2 %s\n"
3264 )
3272 )
3265 % b"nodeid".rjust(idlen)
3273 % b"nodeid".rjust(idlen)
3266 )
3274 )
3267 else:
3275 else:
3268 ui.writenoi18n(
3276 ui.writenoi18n(
3269 b" rev flag size link p1 p2 %s\n"
3277 b" rev flag size link p1 p2 %s\n"
3270 % b"nodeid".rjust(idlen)
3278 % b"nodeid".rjust(idlen)
3271 )
3279 )
3272
3280
3273 for i in r:
3281 for i in r:
3274 node = r.node(i)
3282 node = r.node(i)
3275 if format == 0:
3283 if format == 0:
3276 try:
3284 try:
3277 pp = r.parents(node)
3285 pp = r.parents(node)
3278 except Exception:
3286 except Exception:
3279 pp = [repo.nullid, repo.nullid]
3287 pp = [repo.nullid, repo.nullid]
3280 if ui.verbose:
3288 if ui.verbose:
3281 ui.write(
3289 ui.write(
3282 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3290 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3283 % (
3291 % (
3284 i,
3292 i,
3285 r.start(i),
3293 r.start(i),
3286 r.length(i),
3294 r.length(i),
3287 r.linkrev(i),
3295 r.linkrev(i),
3288 shortfn(node),
3296 shortfn(node),
3289 shortfn(pp[0]),
3297 shortfn(pp[0]),
3290 shortfn(pp[1]),
3298 shortfn(pp[1]),
3291 )
3299 )
3292 )
3300 )
3293 else:
3301 else:
3294 ui.write(
3302 ui.write(
3295 b"% 6d % 7d %s %s %s\n"
3303 b"% 6d % 7d %s %s %s\n"
3296 % (
3304 % (
3297 i,
3305 i,
3298 r.linkrev(i),
3306 r.linkrev(i),
3299 shortfn(node),
3307 shortfn(node),
3300 shortfn(pp[0]),
3308 shortfn(pp[0]),
3301 shortfn(pp[1]),
3309 shortfn(pp[1]),
3302 )
3310 )
3303 )
3311 )
3304 elif format == 1:
3312 elif format == 1:
3305 pr = r.parentrevs(i)
3313 pr = r.parentrevs(i)
3306 if ui.verbose:
3314 if ui.verbose:
3307 ui.write(
3315 ui.write(
3308 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3316 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3309 % (
3317 % (
3310 i,
3318 i,
3311 r.flags(i),
3319 r.flags(i),
3312 r.start(i),
3320 r.start(i),
3313 r.length(i),
3321 r.length(i),
3314 r.rawsize(i),
3322 r.rawsize(i),
3315 r.linkrev(i),
3323 r.linkrev(i),
3316 pr[0],
3324 pr[0],
3317 pr[1],
3325 pr[1],
3318 shortfn(node),
3326 shortfn(node),
3319 )
3327 )
3320 )
3328 )
3321 else:
3329 else:
3322 ui.write(
3330 ui.write(
3323 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3331 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3324 % (
3332 % (
3325 i,
3333 i,
3326 r.flags(i),
3334 r.flags(i),
3327 r.rawsize(i),
3335 r.rawsize(i),
3328 r.linkrev(i),
3336 r.linkrev(i),
3329 pr[0],
3337 pr[0],
3330 pr[1],
3338 pr[1],
3331 shortfn(node),
3339 shortfn(node),
3332 )
3340 )
3333 )
3341 )
3334
3342
3335
3343
3336 @command(
3344 @command(
3337 b'debugrevspec',
3345 b'debugrevspec',
3338 [
3346 [
3339 (
3347 (
3340 b'',
3348 b'',
3341 b'optimize',
3349 b'optimize',
3342 None,
3350 None,
3343 _(b'print parsed tree after optimizing (DEPRECATED)'),
3351 _(b'print parsed tree after optimizing (DEPRECATED)'),
3344 ),
3352 ),
3345 (
3353 (
3346 b'',
3354 b'',
3347 b'show-revs',
3355 b'show-revs',
3348 True,
3356 True,
3349 _(b'print list of result revisions (default)'),
3357 _(b'print list of result revisions (default)'),
3350 ),
3358 ),
3351 (
3359 (
3352 b's',
3360 b's',
3353 b'show-set',
3361 b'show-set',
3354 None,
3362 None,
3355 _(b'print internal representation of result set'),
3363 _(b'print internal representation of result set'),
3356 ),
3364 ),
3357 (
3365 (
3358 b'p',
3366 b'p',
3359 b'show-stage',
3367 b'show-stage',
3360 [],
3368 [],
3361 _(b'print parsed tree at the given stage'),
3369 _(b'print parsed tree at the given stage'),
3362 _(b'NAME'),
3370 _(b'NAME'),
3363 ),
3371 ),
3364 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3372 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3365 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3373 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3366 ],
3374 ],
3367 b'REVSPEC',
3375 b'REVSPEC',
3368 )
3376 )
3369 def debugrevspec(ui, repo, expr, **opts):
3377 def debugrevspec(ui, repo, expr, **opts):
3370 """parse and apply a revision specification
3378 """parse and apply a revision specification
3371
3379
3372 Use -p/--show-stage option to print the parsed tree at the given stages.
3380 Use -p/--show-stage option to print the parsed tree at the given stages.
3373 Use -p all to print the tree at every stage.
3381 Use -p all to print the tree at every stage.
3374
3382
3375 Use --no-show-revs option with -s or -p to print only the set
3383 Use --no-show-revs option with -s or -p to print only the set
3376 representation or the parsed tree respectively.
3384 representation or the parsed tree respectively.
3377
3385
3378 Use --verify-optimized to compare the optimized result with the unoptimized
3386 Use --verify-optimized to compare the optimized result with the unoptimized
3379 one. Returns 1 if the optimized result differs.
3387 one. Returns 1 if the optimized result differs.
3380 """
3388 """
3381 opts = pycompat.byteskwargs(opts)
3389 opts = pycompat.byteskwargs(opts)
3382 aliases = ui.configitems(b'revsetalias')
3390 aliases = ui.configitems(b'revsetalias')
3383 stages = [
3391 stages = [
3384 (b'parsed', lambda tree: tree),
3392 (b'parsed', lambda tree: tree),
3385 (
3393 (
3386 b'expanded',
3394 b'expanded',
3387 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3395 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3388 ),
3396 ),
3389 (b'concatenated', revsetlang.foldconcat),
3397 (b'concatenated', revsetlang.foldconcat),
3390 (b'analyzed', revsetlang.analyze),
3398 (b'analyzed', revsetlang.analyze),
3391 (b'optimized', revsetlang.optimize),
3399 (b'optimized', revsetlang.optimize),
3392 ]
3400 ]
3393 if opts[b'no_optimized']:
3401 if opts[b'no_optimized']:
3394 stages = stages[:-1]
3402 stages = stages[:-1]
3395 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3403 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3396 raise error.Abort(
3404 raise error.Abort(
3397 _(b'cannot use --verify-optimized with --no-optimized')
3405 _(b'cannot use --verify-optimized with --no-optimized')
3398 )
3406 )
3399 stagenames = {n for n, f in stages}
3407 stagenames = {n for n, f in stages}
3400
3408
3401 showalways = set()
3409 showalways = set()
3402 showchanged = set()
3410 showchanged = set()
3403 if ui.verbose and not opts[b'show_stage']:
3411 if ui.verbose and not opts[b'show_stage']:
3404 # show parsed tree by --verbose (deprecated)
3412 # show parsed tree by --verbose (deprecated)
3405 showalways.add(b'parsed')
3413 showalways.add(b'parsed')
3406 showchanged.update([b'expanded', b'concatenated'])
3414 showchanged.update([b'expanded', b'concatenated'])
3407 if opts[b'optimize']:
3415 if opts[b'optimize']:
3408 showalways.add(b'optimized')
3416 showalways.add(b'optimized')
3409 if opts[b'show_stage'] and opts[b'optimize']:
3417 if opts[b'show_stage'] and opts[b'optimize']:
3410 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3418 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3411 if opts[b'show_stage'] == [b'all']:
3419 if opts[b'show_stage'] == [b'all']:
3412 showalways.update(stagenames)
3420 showalways.update(stagenames)
3413 else:
3421 else:
3414 for n in opts[b'show_stage']:
3422 for n in opts[b'show_stage']:
3415 if n not in stagenames:
3423 if n not in stagenames:
3416 raise error.Abort(_(b'invalid stage name: %s') % n)
3424 raise error.Abort(_(b'invalid stage name: %s') % n)
3417 showalways.update(opts[b'show_stage'])
3425 showalways.update(opts[b'show_stage'])
3418
3426
3419 treebystage = {}
3427 treebystage = {}
3420 printedtree = None
3428 printedtree = None
3421 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3429 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3422 for n, f in stages:
3430 for n, f in stages:
3423 treebystage[n] = tree = f(tree)
3431 treebystage[n] = tree = f(tree)
3424 if n in showalways or (n in showchanged and tree != printedtree):
3432 if n in showalways or (n in showchanged and tree != printedtree):
3425 if opts[b'show_stage'] or n != b'parsed':
3433 if opts[b'show_stage'] or n != b'parsed':
3426 ui.write(b"* %s:\n" % n)
3434 ui.write(b"* %s:\n" % n)
3427 ui.write(revsetlang.prettyformat(tree), b"\n")
3435 ui.write(revsetlang.prettyformat(tree), b"\n")
3428 printedtree = tree
3436 printedtree = tree
3429
3437
3430 if opts[b'verify_optimized']:
3438 if opts[b'verify_optimized']:
3431 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3439 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3432 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3440 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3433 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3441 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3434 ui.writenoi18n(
3442 ui.writenoi18n(
3435 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3443 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3436 )
3444 )
3437 ui.writenoi18n(
3445 ui.writenoi18n(
3438 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3446 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3439 )
3447 )
3440 arevs = list(arevs)
3448 arevs = list(arevs)
3441 brevs = list(brevs)
3449 brevs = list(brevs)
3442 if arevs == brevs:
3450 if arevs == brevs:
3443 return 0
3451 return 0
3444 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3452 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3445 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3453 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3446 sm = difflib.SequenceMatcher(None, arevs, brevs)
3454 sm = difflib.SequenceMatcher(None, arevs, brevs)
3447 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3455 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3448 if tag in ('delete', 'replace'):
3456 if tag in ('delete', 'replace'):
3449 for c in arevs[alo:ahi]:
3457 for c in arevs[alo:ahi]:
3450 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3458 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3451 if tag in ('insert', 'replace'):
3459 if tag in ('insert', 'replace'):
3452 for c in brevs[blo:bhi]:
3460 for c in brevs[blo:bhi]:
3453 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3461 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3454 if tag == 'equal':
3462 if tag == 'equal':
3455 for c in arevs[alo:ahi]:
3463 for c in arevs[alo:ahi]:
3456 ui.write(b' %d\n' % c)
3464 ui.write(b' %d\n' % c)
3457 return 1
3465 return 1
3458
3466
3459 func = revset.makematcher(tree)
3467 func = revset.makematcher(tree)
3460 revs = func(repo)
3468 revs = func(repo)
3461 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3469 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3462 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3470 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3463 if not opts[b'show_revs']:
3471 if not opts[b'show_revs']:
3464 return
3472 return
3465 for c in revs:
3473 for c in revs:
3466 ui.write(b"%d\n" % c)
3474 ui.write(b"%d\n" % c)
3467
3475
3468
3476
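# Editor's sketch of the --verify-optimized comparison above: difflib's
# SequenceMatcher opcodes drive a +/- style diff between the analyzed and
# optimized revision lists. Standard-library only; the rev lists are made up.
import difflib

def diff_revlists(arevs, brevs):
    lines = []
    sm = difflib.SequenceMatcher(None, arevs, brevs)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            lines.extend('-%d' % r for r in arevs[alo:ahi])
        if tag in ('insert', 'replace'):
            lines.extend('+%d' % r for r in brevs[blo:bhi])
        if tag == 'equal':
            lines.extend(' %d' % r for r in arevs[alo:ahi])
    return lines

# Revision 1 only appears in the analyzed result, revision 3 only in the
# optimized one; everything else is common to both.
assert diff_revlists([0, 1, 2, 4], [0, 2, 3, 4]) == [
    ' 0', '-1', ' 2', '+3', ' 4']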
3469 @command(
3477 @command(
3470 b'debugserve',
3478 b'debugserve',
3471 [
3479 [
3472 (
3480 (
3473 b'',
3481 b'',
3474 b'sshstdio',
3482 b'sshstdio',
3475 False,
3483 False,
3476 _(b'run an SSH server bound to process handles'),
3484 _(b'run an SSH server bound to process handles'),
3477 ),
3485 ),
3478 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3486 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3479 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3487 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3480 ],
3488 ],
3481 b'',
3489 b'',
3482 )
3490 )
3483 def debugserve(ui, repo, **opts):
3491 def debugserve(ui, repo, **opts):
3484 """run a server with advanced settings
3492 """run a server with advanced settings
3485
3493
3486 This command is similar to :hg:`serve`. It exists partially as a
3494 This command is similar to :hg:`serve`. It exists partially as a
3487 workaround for the fact that ``hg serve --stdio`` must have specific
3495 workaround for the fact that ``hg serve --stdio`` must have specific
3488 arguments for security reasons.
3496 arguments for security reasons.
3489 """
3497 """
3490 opts = pycompat.byteskwargs(opts)
3498 opts = pycompat.byteskwargs(opts)
3491
3499
3492 if not opts[b'sshstdio']:
3500 if not opts[b'sshstdio']:
3493 raise error.Abort(_(b'only --sshstdio is currently supported'))
3501 raise error.Abort(_(b'only --sshstdio is currently supported'))
3494
3502
3495 logfh = None
3503 logfh = None
3496
3504
3497 if opts[b'logiofd'] and opts[b'logiofile']:
3505 if opts[b'logiofd'] and opts[b'logiofile']:
3498 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3506 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3499
3507
3500 if opts[b'logiofd']:
3508 if opts[b'logiofd']:
3501 # Ideally we would be line buffered. But line buffering in binary
3509 # Ideally we would be line buffered. But line buffering in binary
3502 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3510 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3503 # buffering could have performance impacts. But since this isn't
3511 # buffering could have performance impacts. But since this isn't
3504 # performance critical code, it should be fine.
3512 # performance critical code, it should be fine.
3505 try:
3513 try:
3506 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3514 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3507 except OSError as e:
3515 except OSError as e:
3508 if e.errno != errno.ESPIPE:
3516 if e.errno != errno.ESPIPE:
3509 raise
3517 raise
3510 # can't seek a pipe, so `ab` mode fails on py3
3518 # can't seek a pipe, so `ab` mode fails on py3
3511 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3519 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3512 elif opts[b'logiofile']:
3520 elif opts[b'logiofile']:
3513 logfh = open(opts[b'logiofile'], b'ab', 0)
3521 logfh = open(opts[b'logiofile'], b'ab', 0)
3514
3522
3515 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3523 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3516 s.serve_forever()
3524 s.serve_forever()
3517
3525
3518
3526
3519 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3527 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3520 def debugsetparents(ui, repo, rev1, rev2=None):
3528 def debugsetparents(ui, repo, rev1, rev2=None):
3521 """manually set the parents of the current working directory (DANGEROUS)
3529 """manually set the parents of the current working directory (DANGEROUS)
3522
3530
3523 This command is not what you are looking for and should not be used. Using
3531 This command is not what you are looking for and should not be used. Using
3524 this command will most certainly result in slight corruption of the file
3532 this command will most certainly result in slight corruption of the file
3525 level histories within your repository. DO NOT USE THIS COMMAND.
3533 level histories within your repository. DO NOT USE THIS COMMAND.
3526
3534
3527 The command updates the p1 and p2 fields in the dirstate, and touches
3535 The command updates the p1 and p2 fields in the dirstate, and touches
3528 nothing else. This is useful for writing repository conversion tools, but
3536 nothing else. This is useful for writing repository conversion tools, but
3529 should be used with extreme care. For example, neither the working
3537 should be used with extreme care. For example, neither the working
3530 directory nor the dirstate is updated, so file status may be incorrect
3538 directory nor the dirstate is updated, so file status may be incorrect
3531 after running this command. Only use it if you are one of the few people who
3539 after running this command. Only use it if you are one of the few people who
3532 deeply understand both conversion tools and file level histories. If you are
3540 deeply understand both conversion tools and file level histories. If you are
3533 reading this help, you are not one of those people (most of them sailed west
3541 reading this help, you are not one of those people (most of them sailed west
3534 from Mithlond anyway).
3542 from Mithlond anyway).
3535
3543
3536 So one last time DO NOT USE THIS COMMAND.
3544 So one last time DO NOT USE THIS COMMAND.
3537
3545
3538 Returns 0 on success.
3546 Returns 0 on success.
3539 """
3547 """
3540
3548
3541 node1 = scmutil.revsingle(repo, rev1).node()
3549 node1 = scmutil.revsingle(repo, rev1).node()
3542 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3550 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3543
3551
3544 with repo.wlock():
3552 with repo.wlock():
3545 repo.setparents(node1, node2)
3553 repo.setparents(node1, node2)
3546
3554
3547
3555
3548 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3556 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3549 def debugsidedata(ui, repo, file_, rev=None, **opts):
3557 def debugsidedata(ui, repo, file_, rev=None, **opts):
3550 """dump the side data for a cl/manifest/file revision
3558 """dump the side data for a cl/manifest/file revision
3551
3559
3552 Use --verbose to dump the sidedata content."""
3560 Use --verbose to dump the sidedata content."""
3553 opts = pycompat.byteskwargs(opts)
3561 opts = pycompat.byteskwargs(opts)
3554 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3562 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3555 if rev is not None:
3563 if rev is not None:
3556 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3564 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3557 file_, rev = None, file_
3565 file_, rev = None, file_
3558 elif rev is None:
3566 elif rev is None:
3559 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3567 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3560 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3568 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3561 r = getattr(r, '_revlog', r)
3569 r = getattr(r, '_revlog', r)
3562 try:
3570 try:
3563 sidedata = r.sidedata(r.lookup(rev))
3571 sidedata = r.sidedata(r.lookup(rev))
3564 except KeyError:
3572 except KeyError:
3565 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3573 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3566 if sidedata:
3574 if sidedata:
3567 sidedata = list(sidedata.items())
3575 sidedata = list(sidedata.items())
3568 sidedata.sort()
3576 sidedata.sort()
3569 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3577 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3570 for key, value in sidedata:
3578 for key, value in sidedata:
3571 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3579 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3572 if ui.verbose:
3580 if ui.verbose:
3573 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3581 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3574
3582
3575
3583
3576 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3584 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3577 def debugssl(ui, repo, source=None, **opts):
3585 def debugssl(ui, repo, source=None, **opts):
3578 """test a secure connection to a server
3586 """test a secure connection to a server
3579
3587
3580 This builds the certificate chain for the server on Windows, installing the
3588 This builds the certificate chain for the server on Windows, installing the
3581 missing intermediates and trusted root via Windows Update if necessary. It
3589 missing intermediates and trusted root via Windows Update if necessary. It
3582 does nothing on other platforms.
3590 does nothing on other platforms.
3583
3591
3584 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3592 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3585 that server is used. See :hg:`help urls` for more information.
3593 that server is used. See :hg:`help urls` for more information.
3586
3594
3587 If the update succeeds, retry the original operation. Otherwise, the cause
3595 If the update succeeds, retry the original operation. Otherwise, the cause
3588 of the SSL error is likely another issue.
3596 of the SSL error is likely another issue.
3589 """
3597 """
3590 if not pycompat.iswindows:
3598 if not pycompat.iswindows:
3591 raise error.Abort(
3599 raise error.Abort(
3592 _(b'certificate chain building is only possible on Windows')
3600 _(b'certificate chain building is only possible on Windows')
3593 )
3601 )
3594
3602
3595 if not source:
3603 if not source:
3596 if not repo:
3604 if not repo:
3597 raise error.Abort(
3605 raise error.Abort(
3598 _(
3606 _(
3599 b"there is no Mercurial repository here, and no "
3607 b"there is no Mercurial repository here, and no "
3600 b"server specified"
3608 b"server specified"
3601 )
3609 )
3602 )
3610 )
3603 source = b"default"
3611 source = b"default"
3604
3612
3605 path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
3613 path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
3606 url = path.url
3614 url = path.url
3607
3615
3608 defaultport = {b'https': 443, b'ssh': 22}
3616 defaultport = {b'https': 443, b'ssh': 22}
3609 if url.scheme in defaultport:
3617 if url.scheme in defaultport:
3610 try:
3618 try:
3611 addr = (url.host, int(url.port or defaultport[url.scheme]))
3619 addr = (url.host, int(url.port or defaultport[url.scheme]))
3612 except ValueError:
3620 except ValueError:
3613 raise error.Abort(_(b"malformed port number in URL"))
3621 raise error.Abort(_(b"malformed port number in URL"))
3614 else:
3622 else:
3615 raise error.Abort(_(b"only https and ssh connections are supported"))
3623 raise error.Abort(_(b"only https and ssh connections are supported"))
3616
3624
3617 from . import win32
3625 from . import win32
3618
3626
3619 s = ssl.wrap_socket(
3627 s = ssl.wrap_socket(
3620 socket.socket(),
3628 socket.socket(),
3621 ssl_version=ssl.PROTOCOL_TLS,
3629 ssl_version=ssl.PROTOCOL_TLS,
3622 cert_reqs=ssl.CERT_NONE,
3630 cert_reqs=ssl.CERT_NONE,
3623 ca_certs=None,
3631 ca_certs=None,
3624 )
3632 )
3625
3633
3626 try:
3634 try:
3627 s.connect(addr)
3635 s.connect(addr)
3628 cert = s.getpeercert(True)
3636 cert = s.getpeercert(True)
3629
3637
3630 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3638 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3631
3639
3632 complete = win32.checkcertificatechain(cert, build=False)
3640 complete = win32.checkcertificatechain(cert, build=False)
3633
3641
3634 if not complete:
3642 if not complete:
3635 ui.status(_(b'certificate chain is incomplete, updating... '))
3643 ui.status(_(b'certificate chain is incomplete, updating... '))
3636
3644
3637 if not win32.checkcertificatechain(cert):
3645 if not win32.checkcertificatechain(cert):
3638 ui.status(_(b'failed.\n'))
3646 ui.status(_(b'failed.\n'))
3639 else:
3647 else:
3640 ui.status(_(b'done.\n'))
3648 ui.status(_(b'done.\n'))
3641 else:
3649 else:
3642 ui.status(_(b'full certificate chain is available\n'))
3650 ui.status(_(b'full certificate chain is available\n'))
3643 finally:
3651 finally:
3644 s.close()
3652 s.close()
3645
3653
3646
3654
3647 @command(
3655 @command(
3648 b'debug::stable-tail-sort',
3656 b'debug::stable-tail-sort',
3649 [
3657 [
3650 (
3658 (
3651 b'T',
3659 b'T',
3652 b'template',
3660 b'template',
3653 b'{rev}\n',
3661 b'{rev}\n',
3654 _(b'display with template'),
3662 _(b'display with template'),
3655 _(b'TEMPLATE'),
3663 _(b'TEMPLATE'),
3656 ),
3664 ),
3657 ],
3665 ],
3658 b'REV',
3666 b'REV',
3659 )
3667 )
3660 def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
3668 def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
3661 """display the stable-tail sort of the ancestors of a given node"""
3669 """display the stable-tail sort of the ancestors of a given node"""
3662 rev = logcmdutil.revsingle(repo, revspec).rev()
3670 rev = logcmdutil.revsingle(repo, revspec).rev()
3663 cl = repo.changelog
3671 cl = repo.changelog
3664
3672
3665 displayer = logcmdutil.maketemplater(ui, repo, template)
3673 displayer = logcmdutil.maketemplater(ui, repo, template)
3666 sorted_revs = stabletailsort._stable_tail_sort(cl, rev)
3674 sorted_revs = stabletailsort._stable_tail_sort(cl, rev)
3667 for ancestor_rev in sorted_revs:
3675 for ancestor_rev in sorted_revs:
3668 displayer.show(repo[ancestor_rev])
3676 displayer.show(repo[ancestor_rev])
3669
3677
3670
3678
3671 @command(
3679 @command(
3672 b"debugbackupbundle",
3680 b"debugbackupbundle",
3673 [
3681 [
3674 (
3682 (
3675 b"",
3683 b"",
3676 b"recover",
3684 b"recover",
3677 b"",
3685 b"",
3678 b"brings the specified changeset back into the repository",
3686 b"brings the specified changeset back into the repository",
3679 )
3687 )
3680 ]
3688 ]
3681 + cmdutil.logopts,
3689 + cmdutil.logopts,
3682 _(b"hg debugbackupbundle [--recover HASH]"),
3690 _(b"hg debugbackupbundle [--recover HASH]"),
3683 )
3691 )
3684 def debugbackupbundle(ui, repo, *pats, **opts):
3692 def debugbackupbundle(ui, repo, *pats, **opts):
3685 """lists the changesets available in backup bundles
3693 """lists the changesets available in backup bundles
3686
3694
3687 Without any arguments, this command prints a list of the changesets in each
3695 Without any arguments, this command prints a list of the changesets in each
3688 backup bundle.
3696 backup bundle.
3689
3697
3690 --recover takes a changeset hash and unbundles the first bundle that
3698 --recover takes a changeset hash and unbundles the first bundle that
3691 contains that hash, which puts that changeset back in your repository.
3699 contains that hash, which puts that changeset back in your repository.
3692
3700
3693 --verbose will print the entire commit message and the bundle path for that
3701 --verbose will print the entire commit message and the bundle path for that
3694 backup.
3702 backup.
3695 """
3703 """
3696 backups = list(
3704 backups = list(
3697 filter(
3705 filter(
3698 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3706 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3699 )
3707 )
3700 )
3708 )
3701 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3709 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3702
3710
3703 opts = pycompat.byteskwargs(opts)
3711 opts = pycompat.byteskwargs(opts)
3704 opts[b"bundle"] = b""
3712 opts[b"bundle"] = b""
3705 opts[b"force"] = None
3713 opts[b"force"] = None
3706 limit = logcmdutil.getlimit(opts)
3714 limit = logcmdutil.getlimit(opts)
3707
3715
3708 def display(other, chlist, displayer):
3716 def display(other, chlist, displayer):
3709 if opts.get(b"newest_first"):
3717 if opts.get(b"newest_first"):
3710 chlist.reverse()
3718 chlist.reverse()
3711 count = 0
3719 count = 0
3712 for n in chlist:
3720 for n in chlist:
3713 if limit is not None and count >= limit:
3721 if limit is not None and count >= limit:
3714 break
3722 break
3715 parents = [
3723 parents = [
3716 True for p in other.changelog.parents(n) if p != repo.nullid
3724 True for p in other.changelog.parents(n) if p != repo.nullid
3717 ]
3725 ]
3718 if opts.get(b"no_merges") and len(parents) == 2:
3726 if opts.get(b"no_merges") and len(parents) == 2:
3719 continue
3727 continue
3720 count += 1
3728 count += 1
3721 displayer.show(other[n])
3729 displayer.show(other[n])
3722
3730
3723 recovernode = opts.get(b"recover")
3731 recovernode = opts.get(b"recover")
3724 if recovernode:
3732 if recovernode:
3725 if scmutil.isrevsymbol(repo, recovernode):
3733 if scmutil.isrevsymbol(repo, recovernode):
3726 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3734 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3727 return
3735 return
3728 elif backups:
3736 elif backups:
3729 msg = _(
3737 msg = _(
3730 b"Recover changesets using: hg debugbackupbundle --recover "
3738 b"Recover changesets using: hg debugbackupbundle --recover "
3731 b"<changeset hash>\n\nAvailable backup changesets:"
3739 b"<changeset hash>\n\nAvailable backup changesets:"
3732 )
3740 )
3733 ui.status(msg, label=b"status.removed")
3741 ui.status(msg, label=b"status.removed")
3734 else:
3742 else:
3735 ui.status(_(b"no backup changesets found\n"))
3743 ui.status(_(b"no backup changesets found\n"))
3736 return
3744 return
3737
3745
3738 for backup in backups:
3746 for backup in backups:
3739 # Much of this is copied from the hg incoming logic
3747 # Much of this is copied from the hg incoming logic
3740 source = os.path.relpath(backup, encoding.getcwd())
3748 source = os.path.relpath(backup, encoding.getcwd())
3741 path = urlutil.get_unique_pull_path_obj(
3749 path = urlutil.get_unique_pull_path_obj(
3742 b'debugbackupbundle',
3750 b'debugbackupbundle',
3743 ui,
3751 ui,
3744 source,
3752 source,
3745 )
3753 )
3746 try:
3754 try:
3747 other = hg.peer(repo, opts, path)
3755 other = hg.peer(repo, opts, path)
3748 except error.LookupError as ex:
3756 except error.LookupError as ex:
3749 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3757 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3750 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3758 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3751 ui.warn(msg, hint=hint)
3759 ui.warn(msg, hint=hint)
3752 continue
3760 continue
3753 branches = (path.branch, opts.get(b'branch', []))
3761 branches = (path.branch, opts.get(b'branch', []))
3754 revs, checkout = hg.addbranchrevs(
3762 revs, checkout = hg.addbranchrevs(
3755 repo, other, branches, opts.get(b"rev")
3763 repo, other, branches, opts.get(b"rev")
3756 )
3764 )
3757
3765
3758 if revs:
3766 if revs:
3759 revs = [other.lookup(rev) for rev in revs]
3767 revs = [other.lookup(rev) for rev in revs]
3760
3768
3761 with ui.silent():
3769 with ui.silent():
3762 try:
3770 try:
3763 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3771 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3764 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3772 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3765 )
3773 )
3766 except error.LookupError:
3774 except error.LookupError:
3767 continue
3775 continue
3768
3776
3769 try:
3777 try:
3770 if not chlist:
3778 if not chlist:
3771 continue
3779 continue
3772 if recovernode:
3780 if recovernode:
3773 with repo.lock(), repo.transaction(b"unbundle") as tr:
3781 with repo.lock(), repo.transaction(b"unbundle") as tr:
3774 if scmutil.isrevsymbol(other, recovernode):
3782 if scmutil.isrevsymbol(other, recovernode):
3775 ui.status(_(b"Unbundling %s\n") % (recovernode))
3783 ui.status(_(b"Unbundling %s\n") % (recovernode))
3776 f = hg.openpath(ui, path.loc)
3784 f = hg.openpath(ui, path.loc)
3777 gen = exchange.readbundle(ui, f, path.loc)
3785 gen = exchange.readbundle(ui, f, path.loc)
3778 if isinstance(gen, bundle2.unbundle20):
3786 if isinstance(gen, bundle2.unbundle20):
3779 bundle2.applybundle(
3787 bundle2.applybundle(
3780 repo,
3788 repo,
3781 gen,
3789 gen,
3782 tr,
3790 tr,
3783 source=b"unbundle",
3791 source=b"unbundle",
3784 url=b"bundle:" + path.loc,
3792 url=b"bundle:" + path.loc,
3785 )
3793 )
3786 else:
3794 else:
3787 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3795 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3788 break
3796 break
3789 else:
3797 else:
3790 backupdate = encoding.strtolocal(
3798 backupdate = encoding.strtolocal(
3791 time.strftime(
3799 time.strftime(
3792 "%a %H:%M, %Y-%m-%d",
3800 "%a %H:%M, %Y-%m-%d",
3793 time.localtime(os.path.getmtime(path.loc)),
3801 time.localtime(os.path.getmtime(path.loc)),
3794 )
3802 )
3795 )
3803 )
3796 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3804 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3797 if ui.verbose:
3805 if ui.verbose:
3798 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3806 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3799 else:
3807 else:
3800 opts[
3808 opts[
3801 b"template"
3809 b"template"
3802 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3810 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3803 displayer = logcmdutil.changesetdisplayer(
3811 displayer = logcmdutil.changesetdisplayer(
3804 ui, other, opts, False
3812 ui, other, opts, False
3805 )
3813 )
3806 display(other, chlist, displayer)
3814 display(other, chlist, displayer)
3807 displayer.close()
3815 displayer.close()
3808 finally:
3816 finally:
3809 cleanupfn()
3817 cleanupfn()
3810
3818
3811
3819
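# Editor's sketch of how debugbackupbundle above gathers its candidates:
# every *.hg file under the repository's strip-backup directory, newest
# first by modification time. Standard-library only; the directory path is
# spelled out literally here instead of going through repo.vfs.
import glob
import os

def list_backup_bundles(repo_root):
    pattern = os.path.join(repo_root, '.hg', 'strip-backup', '*.hg')
    bundles = [p for p in glob.glob(pattern) if os.path.isfile(p)]
    bundles.sort(key=os.path.getmtime, reverse=True)   # newest backup first
    return bundles

# Usage (hypothetical path):
#   for bundle in list_backup_bundles('/path/to/repo'):
#       print(bundle)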
3812 @command(
3820 @command(
3813 b'debugsub',
3821 b'debugsub',
3814 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3822 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3815 _(b'[-r REV] [REV]'),
3823 _(b'[-r REV] [REV]'),
3816 )
3824 )
3817 def debugsub(ui, repo, rev=None):
3825 def debugsub(ui, repo, rev=None):
3818 ctx = scmutil.revsingle(repo, rev, None)
3826 ctx = scmutil.revsingle(repo, rev, None)
3819 for k, v in sorted(ctx.substate.items()):
3827 for k, v in sorted(ctx.substate.items()):
3820 ui.writenoi18n(b'path %s\n' % k)
3828 ui.writenoi18n(b'path %s\n' % k)
3821 ui.writenoi18n(b' source %s\n' % v[0])
3829 ui.writenoi18n(b' source %s\n' % v[0])
3822 ui.writenoi18n(b' revision %s\n' % v[1])
3830 ui.writenoi18n(b' revision %s\n' % v[1])
3823
3831
3824
3832
3825 @command(
3833 @command(
3826 b'debugshell',
3834 b'debugshell',
3827 [
3835 [
3828 (
3836 (
3829 b'c',
3837 b'c',
3830 b'command',
3838 b'command',
3831 b'',
3839 b'',
3832 _(b'program passed in as a string'),
3840 _(b'program passed in as a string'),
3833 _(b'COMMAND'),
3841 _(b'COMMAND'),
3834 )
3842 )
3835 ],
3843 ],
3836 _(b'[-c COMMAND]'),
3844 _(b'[-c COMMAND]'),
3837 optionalrepo=True,
3845 optionalrepo=True,
3838 )
3846 )
3839 def debugshell(ui, repo, **opts):
3847 def debugshell(ui, repo, **opts):
3840 """run an interactive Python interpreter
3848 """run an interactive Python interpreter
3841
3849
3842 The local namespace is provided with a reference to the ui and
3850 The local namespace is provided with a reference to the ui and
3843 the repo instance (if available).
3851 the repo instance (if available).
3844 """
3852 """
3845 import code
3853 import code
3846
3854
3847 imported_objects = {
3855 imported_objects = {
3848 'ui': ui,
3856 'ui': ui,
3849 'repo': repo,
3857 'repo': repo,
3850 }
3858 }
3851
3859
3852 # py2exe disables initialization of the site module, which is responsible
3860 # py2exe disables initialization of the site module, which is responsible
3853 # for arranging for ``quit()`` to exit the interpreter. Manually initialize
3861 # for arranging for ``quit()`` to exit the interpreter. Manually initialize
3854 # the stuff that site normally does here, so that the interpreter can be
3862 # the stuff that site normally does here, so that the interpreter can be
3855 # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
3863 # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
3856 # py.exe, or py2exe.
3864 # py.exe, or py2exe.
3857 if getattr(sys, "frozen", None) == 'console_exe':
3865 if getattr(sys, "frozen", None) == 'console_exe':
3858 try:
3866 try:
3859 import site
3867 import site
3860
3868
3861 site.setcopyright()
3869 site.setcopyright()
3862 site.sethelper()
3870 site.sethelper()
3863 site.setquit()
3871 site.setquit()
3864 except ImportError:
3872 except ImportError:
3865 site = None # Keep PyCharm happy
3873 site = None # Keep PyCharm happy
3866
3874
3867 command = opts.get('command')
3875 command = opts.get('command')
3868 if command:
3876 if command:
3869 compiled = code.compile_command(encoding.strfromlocal(command))
3877 compiled = code.compile_command(encoding.strfromlocal(command))
3870 code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
3878 code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
3871 return
3879 return
3872
3880
3873 code.interact(local=imported_objects)
3881 code.interact(local=imported_objects)
3874
3882
3875
3883
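# A minimal usage sketch for debugshell (hypothetical invocations; the exact
# output depends on the repository). ``ui`` and ``repo`` are the only names
# pre-populated in the interpreter namespace:
#
#   $ hg debugshell -c 'ui.write(b"%s has %d revisions\n" % (repo.root, len(repo)))'
#
# or, interactively:
#
#   >>> ui.write(b'tip: %s\n' % repo[b'tip'].hex())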
3876 @command(
3884 @command(
3877 b'debug-revlog-stats',
3885 b'debug-revlog-stats',
3878 [
3886 [
3879 (b'c', b'changelog', None, _(b'Display changelog statistics')),
3887 (b'c', b'changelog', None, _(b'Display changelog statistics')),
3880 (b'm', b'manifest', None, _(b'Display manifest statistics')),
3888 (b'm', b'manifest', None, _(b'Display manifest statistics')),
3881 (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
3889 (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
3882 ]
3890 ]
3883 + cmdutil.formatteropts,
3891 + cmdutil.formatteropts,
3884 )
3892 )
3885 def debug_revlog_stats(ui, repo, **opts):
3893 def debug_revlog_stats(ui, repo, **opts):
3886 """display statistics about revlogs in the store"""
3894 """display statistics about revlogs in the store"""
3887 opts = pycompat.byteskwargs(opts)
3895 opts = pycompat.byteskwargs(opts)
3888 changelog = opts[b"changelog"]
3896 changelog = opts[b"changelog"]
3889 manifest = opts[b"manifest"]
3897 manifest = opts[b"manifest"]
3890 filelogs = opts[b"filelogs"]
3898 filelogs = opts[b"filelogs"]
3891
3899
3892 if changelog is None and manifest is None and filelogs is None:
3900 if changelog is None and manifest is None and filelogs is None:
3893 changelog = True
3901 changelog = True
3894 manifest = True
3902 manifest = True
3895 filelogs = True
3903 filelogs = True
3896
3904
3897 repo = repo.unfiltered()
3905 repo = repo.unfiltered()
3898 fm = ui.formatter(b'debug-revlog-stats', opts)
3906 fm = ui.formatter(b'debug-revlog-stats', opts)
3899 revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
3907 revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
3900 fm.end()
3908 fm.end()
3901
3909
3902
3910
3903 @command(
3911 @command(
3904 b'debugsuccessorssets',
3912 b'debugsuccessorssets',
3905 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3913 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3906 _(b'[REV]'),
3914 _(b'[REV]'),
3907 )
3915 )
3908 def debugsuccessorssets(ui, repo, *revs, **opts):
3916 def debugsuccessorssets(ui, repo, *revs, **opts):
3909 """show set of successors for revision
3917 """show set of successors for revision
3910
3918
3911 A successors set of changeset A is a consistent group of revisions that
3919 A successors set of changeset A is a consistent group of revisions that
3912 succeed A. It contains non-obsolete changesets only, unless the closest
3920 succeed A. It contains non-obsolete changesets only, unless the closest
3913 successors sets are requested (--closest).
3921 successors sets are requested (--closest).
3914
3922
3915 In most cases a changeset A has a single successors set containing a single
3923 In most cases a changeset A has a single successors set containing a single
3916 successor (changeset A replaced by A').
3924 successor (changeset A replaced by A').
3917
3925
3918 A changeset that is made obsolete with no successors are called "pruned".
3926 A changeset that is made obsolete with no successors are called "pruned".
3919 Such changesets have no successors sets at all.
3927 Such changesets have no successors sets at all.
3920
3928
3921 A changeset that has been "split" will have a successors set containing
3929 A changeset that has been "split" will have a successors set containing
3922 more than one successor.
3930 more than one successor.
3923
3931
3924 A changeset that has been rewritten in multiple different ways is called
3932 A changeset that has been rewritten in multiple different ways is called
3925 "divergent". Such changesets have multiple successor sets (each of which
3933 "divergent". Such changesets have multiple successor sets (each of which
3926 may also be split, i.e. have multiple successors).
3934 may also be split, i.e. have multiple successors).
3927
3935
3928 Results are displayed as follows::
3936 Results are displayed as follows::
3929
3937
3930 <rev1>
3938 <rev1>
3931 <successors-1A>
3939 <successors-1A>
3932 <rev2>
3940 <rev2>
3933 <successors-2A>
3941 <successors-2A>
3934 <successors-2B1> <successors-2B2> <successors-2B3>
3942 <successors-2B1> <successors-2B2> <successors-2B3>
3935
3943
3936 Here rev2 has two possible (i.e. divergent) successors sets. The first
3944 Here rev2 has two possible (i.e. divergent) successors sets. The first
3937 holds one element, whereas the second holds three (i.e. the changeset has
3945 holds one element, whereas the second holds three (i.e. the changeset has
3938 been split).
3946 been split).
3939 """
3947 """
3940 # passed to successorssets caching computation from one call to another
3948 # passed to successorssets caching computation from one call to another
3941 cache = {}
3949 cache = {}
3942 ctx2str = bytes
3950 ctx2str = bytes
3943 node2str = short
3951 node2str = short
3944 for rev in logcmdutil.revrange(repo, revs):
3952 for rev in logcmdutil.revrange(repo, revs):
3945 ctx = repo[rev]
3953 ctx = repo[rev]
3946 ui.write(b'%s\n' % ctx2str(ctx))
3954 ui.write(b'%s\n' % ctx2str(ctx))
3947 for succsset in obsutil.successorssets(
3955 for succsset in obsutil.successorssets(
3948 repo, ctx.node(), closest=opts['closest'], cache=cache
3956 repo, ctx.node(), closest=opts['closest'], cache=cache
3949 ):
3957 ):
3950 if succsset:
3958 if succsset:
3951 ui.write(b' ')
3959 ui.write(b' ')
3952 ui.write(node2str(succsset[0]))
3960 ui.write(node2str(succsset[0]))
3953 for node in succsset[1:]:
3961 for node in succsset[1:]:
3954 ui.write(b' ')
3962 ui.write(b' ')
3955 ui.write(node2str(node))
3963 ui.write(node2str(node))
3956 ui.write(b'\n')
3964 ui.write(b'\n')
3957
3965
3958
3966
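# A minimal sketch of the API exercised above, assuming ``repo`` is an open
# repository and ``node`` the binary id of an obsolete changeset:
#
#   cache = {}
#   for succset in obsutil.successorssets(repo, node, cache=cache):
#       # more than one set means the changeset is divergent; a pruned
#       # changeset yields no sets at all
#       print([short(n) for n in succset])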
3959 @command(b'debugtagscache', [])
3967 @command(b'debugtagscache', [])
3960 def debugtagscache(ui, repo):
3968 def debugtagscache(ui, repo):
3961 """display the contents of .hg/cache/hgtagsfnodes1"""
3969 """display the contents of .hg/cache/hgtagsfnodes1"""
3962 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3970 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3963 flog = repo.file(b'.hgtags')
3971 flog = repo.file(b'.hgtags')
3964 for r in repo:
3972 for r in repo:
3965 node = repo[r].node()
3973 node = repo[r].node()
3966 tagsnode = cache.getfnode(node, computemissing=False)
3974 tagsnode = cache.getfnode(node, computemissing=False)
3967 if tagsnode:
3975 if tagsnode:
3968 tagsnodedisplay = hex(tagsnode)
3976 tagsnodedisplay = hex(tagsnode)
3969 if not flog.hasnode(tagsnode):
3977 if not flog.hasnode(tagsnode):
3970 tagsnodedisplay += b' (unknown node)'
3978 tagsnodedisplay += b' (unknown node)'
3971 elif tagsnode is None:
3979 elif tagsnode is None:
3972 tagsnodedisplay = b'missing'
3980 tagsnodedisplay = b'missing'
3973 else:
3981 else:
3974 tagsnodedisplay = b'invalid'
3982 tagsnodedisplay = b'invalid'
3975
3983
3976 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3984 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3977
3985
3978
3986
3979 @command(
3987 @command(
3980 b'debugtemplate',
3988 b'debugtemplate',
3981 [
3989 [
3982 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3990 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3983 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3991 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3984 ],
3992 ],
3985 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3993 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3986 optionalrepo=True,
3994 optionalrepo=True,
3987 )
3995 )
3988 def debugtemplate(ui, repo, tmpl, **opts):
3996 def debugtemplate(ui, repo, tmpl, **opts):
3989 """parse and apply a template
3997 """parse and apply a template
3990
3998
3991 If -r/--rev is given, the template is processed as a log template and
3999 If -r/--rev is given, the template is processed as a log template and
3992 applied to the given changesets. Otherwise, it is processed as a generic
4000 applied to the given changesets. Otherwise, it is processed as a generic
3993 template.
4001 template.
3994
4002
3995 Use --verbose to print the parsed tree.
4003 Use --verbose to print the parsed tree.
3996 """
4004 """
3997 revs = None
4005 revs = None
3998 if opts['rev']:
4006 if opts['rev']:
3999 if repo is None:
4007 if repo is None:
4000 raise error.RepoError(
4008 raise error.RepoError(
4001 _(b'there is no Mercurial repository here (.hg not found)')
4009 _(b'there is no Mercurial repository here (.hg not found)')
4002 )
4010 )
4003 revs = logcmdutil.revrange(repo, opts['rev'])
4011 revs = logcmdutil.revrange(repo, opts['rev'])
4004
4012
4005 props = {}
4013 props = {}
4006 for d in opts['define']:
4014 for d in opts['define']:
4007 try:
4015 try:
4008 k, v = (e.strip() for e in d.split(b'=', 1))
4016 k, v = (e.strip() for e in d.split(b'=', 1))
4009 if not k or k == b'ui':
4017 if not k or k == b'ui':
4010 raise ValueError
4018 raise ValueError
4011 props[k] = v
4019 props[k] = v
4012 except ValueError:
4020 except ValueError:
4013 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4021 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4014
4022
4015 if ui.verbose:
4023 if ui.verbose:
4016 aliases = ui.configitems(b'templatealias')
4024 aliases = ui.configitems(b'templatealias')
4017 tree = templater.parse(tmpl)
4025 tree = templater.parse(tmpl)
4018 ui.note(templater.prettyformat(tree), b'\n')
4026 ui.note(templater.prettyformat(tree), b'\n')
4019 newtree = templater.expandaliases(tree, aliases)
4027 newtree = templater.expandaliases(tree, aliases)
4020 if newtree != tree:
4028 if newtree != tree:
4021 ui.notenoi18n(
4029 ui.notenoi18n(
4022 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4030 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4023 )
4031 )
4024
4032
4025 if revs is None:
4033 if revs is None:
4026 tres = formatter.templateresources(ui, repo)
4034 tres = formatter.templateresources(ui, repo)
4027 t = formatter.maketemplater(ui, tmpl, resources=tres)
4035 t = formatter.maketemplater(ui, tmpl, resources=tres)
4028 if ui.verbose:
4036 if ui.verbose:
4029 kwds, funcs = t.symbolsuseddefault()
4037 kwds, funcs = t.symbolsuseddefault()
4030 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4038 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4031 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4039 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4032 ui.write(t.renderdefault(props))
4040 ui.write(t.renderdefault(props))
4033 else:
4041 else:
4034 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4042 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4035 if ui.verbose:
4043 if ui.verbose:
4036 kwds, funcs = displayer.t.symbolsuseddefault()
4044 kwds, funcs = displayer.t.symbolsuseddefault()
4037 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4045 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4038 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4046 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4039 for r in revs:
4047 for r in revs:
4040 displayer.show(repo[r], **pycompat.strkwargs(props))
4048 displayer.show(repo[r], **pycompat.strkwargs(props))
4041 displayer.close()
4049 displayer.close()
4042
4050
4043
4051
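# A minimal usage sketch (hypothetical invocations; the templates and keyword
# definitions are illustrative only):
#
#   $ hg debugtemplate -D name=world '{name}\n'
#   world
#   $ hg debugtemplate -r tip '{rev}:{node|short} {desc|firstline}\n'
#
# With --verbose the parsed template tree is printed before the rendered
# output, as described in the docstring above.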
4044 @command(
4052 @command(
4045 b'debuguigetpass',
4053 b'debuguigetpass',
4046 [
4054 [
4047 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4055 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4048 ],
4056 ],
4049 _(b'[-p TEXT]'),
4057 _(b'[-p TEXT]'),
4050 norepo=True,
4058 norepo=True,
4051 )
4059 )
4052 def debuguigetpass(ui, prompt=b''):
4060 def debuguigetpass(ui, prompt=b''):
4053 """show prompt to type password"""
4061 """show prompt to type password"""
4054 r = ui.getpass(prompt)
4062 r = ui.getpass(prompt)
4055 if r is None:
4063 if r is None:
4056 r = b"<default response>"
4064 r = b"<default response>"
4057 ui.writenoi18n(b'response: %s\n' % r)
4065 ui.writenoi18n(b'response: %s\n' % r)
4058
4066
4059
4067
4060 @command(
4068 @command(
4061 b'debuguiprompt',
4069 b'debuguiprompt',
4062 [
4070 [
4063 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4071 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4064 ],
4072 ],
4065 _(b'[-p TEXT]'),
4073 _(b'[-p TEXT]'),
4066 norepo=True,
4074 norepo=True,
4067 )
4075 )
4068 def debuguiprompt(ui, prompt=b''):
4076 def debuguiprompt(ui, prompt=b''):
4069 """show plain prompt"""
4077 """show plain prompt"""
4070 r = ui.prompt(prompt)
4078 r = ui.prompt(prompt)
4071 ui.writenoi18n(b'response: %s\n' % r)
4079 ui.writenoi18n(b'response: %s\n' % r)
4072
4080
4073
4081
4074 @command(b'debugupdatecaches', [])
4082 @command(b'debugupdatecaches', [])
4075 def debugupdatecaches(ui, repo, *pats, **opts):
4083 def debugupdatecaches(ui, repo, *pats, **opts):
4076 """warm all known caches in the repository"""
4084 """warm all known caches in the repository"""
4077 with repo.wlock(), repo.lock():
4085 with repo.wlock(), repo.lock():
4078 repo.updatecaches(caches=repository.CACHES_ALL)
4086 repo.updatecaches(caches=repository.CACHES_ALL)
4079
4087
4080
4088
4081 @command(
4089 @command(
4082 b'debugupgraderepo',
4090 b'debugupgraderepo',
4083 [
4091 [
4084 (
4092 (
4085 b'o',
4093 b'o',
4086 b'optimize',
4094 b'optimize',
4087 [],
4095 [],
4088 _(b'extra optimization to perform'),
4096 _(b'extra optimization to perform'),
4089 _(b'NAME'),
4097 _(b'NAME'),
4090 ),
4098 ),
4091 (b'', b'run', False, _(b'performs an upgrade')),
4099 (b'', b'run', False, _(b'performs an upgrade')),
4092 (b'', b'backup', True, _(b'keep the old repository content around')),
4100 (b'', b'backup', True, _(b'keep the old repository content around')),
4093 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4101 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4094 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4102 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4095 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4103 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4096 ],
4104 ],
4097 )
4105 )
4098 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4106 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4099 """upgrade a repository to use different features
4107 """upgrade a repository to use different features
4100
4108
4101 If no arguments are specified, the repository is evaluated for upgrade
4109 If no arguments are specified, the repository is evaluated for upgrade
4102 and a list of problems and potential optimizations is printed.
4110 and a list of problems and potential optimizations is printed.
4103
4111
4104 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4112 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4105 can be influenced via additional arguments. More details will be provided
4113 can be influenced via additional arguments. More details will be provided
4106 by the command output when run without ``--run``.
4114 by the command output when run without ``--run``.
4107
4115
4108 During the upgrade, the repository will be locked and no writes will be
4116 During the upgrade, the repository will be locked and no writes will be
4109 allowed.
4117 allowed.
4110
4118
4111 At the end of the upgrade, the repository may not be readable while new
4119 At the end of the upgrade, the repository may not be readable while new
4112 repository data is swapped in. This window will be as long as it takes to
4120 repository data is swapped in. This window will be as long as it takes to
4113 rename some directories inside the ``.hg`` directory. On most machines, this
4121 rename some directories inside the ``.hg`` directory. On most machines, this
4114 should complete almost instantaneously and the chances of a consumer being
4122 should complete almost instantaneously and the chances of a consumer being
4115 unable to access the repository should be low.
4123 unable to access the repository should be low.
4116
4124
4117 By default, all revlogs will be upgraded. You can restrict this using flags
4125 By default, all revlogs will be upgraded. You can restrict this using flags
4118 such as `--manifest`:
4126 such as `--manifest`:
4119
4127
4120 * `--manifest`: only optimize the manifest
4128 * `--manifest`: only optimize the manifest
4121 * `--no-manifest`: optimize all revlog but the manifest
4129 * `--no-manifest`: optimize all revlog but the manifest
4122 * `--changelog`: optimize the changelog only
4130 * `--changelog`: optimize the changelog only
4123 * `--no-changelog --no-manifest`: optimize filelogs only
4131 * `--no-changelog --no-manifest`: optimize filelogs only
4124 * `--filelogs`: optimize the filelogs only
4132 * `--filelogs`: optimize the filelogs only
4125 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4133 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4126 """
4134 """
4127 return upgrade.upgraderepo(
4135 return upgrade.upgraderepo(
4128 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4136 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4129 )
4137 )
4130
4138
4131
4139
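# A minimal usage sketch (hypothetical invocations; flag combinations mirror
# the examples in the docstring above):
#
#   $ hg debugupgraderepo                      # report only, nothing is changed
#   $ hg debugupgraderepo --run --no-manifest  # upgrade everything but the manifest
#   $ hg debugupgraderepo --run --optimize re-delta-parent
#
# `re-delta-parent` is one optimization name the command may list; run the
# report form first to see which optimizations apply to a given repository.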
4132 @command(
4140 @command(
4133 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4141 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4134 )
4142 )
4135 def debugwalk(ui, repo, *pats, **opts):
4143 def debugwalk(ui, repo, *pats, **opts):
4136 """show how files match on given patterns"""
4144 """show how files match on given patterns"""
4137 opts = pycompat.byteskwargs(opts)
4145 opts = pycompat.byteskwargs(opts)
4138 m = scmutil.match(repo[None], pats, opts)
4146 m = scmutil.match(repo[None], pats, opts)
4139 if ui.verbose:
4147 if ui.verbose:
4140 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4148 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4141 items = list(repo[None].walk(m))
4149 items = list(repo[None].walk(m))
4142 if not items:
4150 if not items:
4143 return
4151 return
4144 f = lambda fn: fn
4152 f = lambda fn: fn
4145 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4153 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4146 f = lambda fn: util.normpath(fn)
4154 f = lambda fn: util.normpath(fn)
4147 fmt = b'f %%-%ds %%-%ds %%s' % (
4155 fmt = b'f %%-%ds %%-%ds %%s' % (
4148 max([len(abs) for abs in items]),
4156 max([len(abs) for abs in items]),
4149 max([len(repo.pathto(abs)) for abs in items]),
4157 max([len(repo.pathto(abs)) for abs in items]),
4150 )
4158 )
4151 for abs in items:
4159 for abs in items:
4152 line = fmt % (
4160 line = fmt % (
4153 abs,
4161 abs,
4154 f(repo.pathto(abs)),
4162 f(repo.pathto(abs)),
4155 m.exact(abs) and b'exact' or b'',
4163 m.exact(abs) and b'exact' or b'',
4156 )
4164 )
4157 ui.write(b"%s\n" % line.rstrip())
4165 ui.write(b"%s\n" % line.rstrip())
4158
4166
4159
4167
4160 @command(b'debugwhyunstable', [], _(b'REV'))
4168 @command(b'debugwhyunstable', [], _(b'REV'))
4161 def debugwhyunstable(ui, repo, rev):
4169 def debugwhyunstable(ui, repo, rev):
4162 """explain instabilities of a changeset"""
4170 """explain instabilities of a changeset"""
4163 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4171 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4164 dnodes = b''
4172 dnodes = b''
4165 if entry.get(b'divergentnodes'):
4173 if entry.get(b'divergentnodes'):
4166 dnodes = (
4174 dnodes = (
4167 b' '.join(
4175 b' '.join(
4168 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4176 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4169 for ctx in entry[b'divergentnodes']
4177 for ctx in entry[b'divergentnodes']
4170 )
4178 )
4171 + b' '
4179 + b' '
4172 )
4180 )
4173 ui.write(
4181 ui.write(
4174 b'%s: %s%s %s\n'
4182 b'%s: %s%s %s\n'
4175 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4183 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4176 )
4184 )
4177
4185
4178
4186
4179 @command(
4187 @command(
4180 b'debugwireargs',
4188 b'debugwireargs',
4181 [
4189 [
4182 (b'', b'three', b'', b'three'),
4190 (b'', b'three', b'', b'three'),
4183 (b'', b'four', b'', b'four'),
4191 (b'', b'four', b'', b'four'),
4184 (b'', b'five', b'', b'five'),
4192 (b'', b'five', b'', b'five'),
4185 ]
4193 ]
4186 + cmdutil.remoteopts,
4194 + cmdutil.remoteopts,
4187 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4195 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4188 norepo=True,
4196 norepo=True,
4189 )
4197 )
4190 def debugwireargs(ui, repopath, *vals, **opts):
4198 def debugwireargs(ui, repopath, *vals, **opts):
4191 opts = pycompat.byteskwargs(opts)
4199 opts = pycompat.byteskwargs(opts)
4192 repo = hg.peer(ui, opts, repopath)
4200 repo = hg.peer(ui, opts, repopath)
4193 try:
4201 try:
4194 for opt in cmdutil.remoteopts:
4202 for opt in cmdutil.remoteopts:
4195 del opts[opt[1]]
4203 del opts[opt[1]]
4196 args = {}
4204 args = {}
4197 for k, v in opts.items():
4205 for k, v in opts.items():
4198 if v:
4206 if v:
4199 args[k] = v
4207 args[k] = v
4200 args = pycompat.strkwargs(args)
4208 args = pycompat.strkwargs(args)
4201 # run twice to check that we don't mess up the stream for the next command
4209 # run twice to check that we don't mess up the stream for the next command
4202 res1 = repo.debugwireargs(*vals, **args)
4210 res1 = repo.debugwireargs(*vals, **args)
4203 res2 = repo.debugwireargs(*vals, **args)
4211 res2 = repo.debugwireargs(*vals, **args)
4204 ui.write(b"%s\n" % res1)
4212 ui.write(b"%s\n" % res1)
4205 if res1 != res2:
4213 if res1 != res2:
4206 ui.warn(b"%s\n" % res2)
4214 ui.warn(b"%s\n" % res2)
4207 finally:
4215 finally:
4208 repo.close()
4216 repo.close()
4209
4217
4210
4218
4211 def _parsewirelangblocks(fh):
4219 def _parsewirelangblocks(fh):
4212 activeaction = None
4220 activeaction = None
4213 blocklines = []
4221 blocklines = []
4214 lastindent = 0
4222 lastindent = 0
4215
4223
4216 for line in fh:
4224 for line in fh:
4217 line = line.rstrip()
4225 line = line.rstrip()
4218 if not line:
4226 if not line:
4219 continue
4227 continue
4220
4228
4221 if line.startswith(b'#'):
4229 if line.startswith(b'#'):
4222 continue
4230 continue
4223
4231
4224 if not line.startswith(b' '):
4232 if not line.startswith(b' '):
4225 # New block. Flush previous one.
4233 # New block. Flush previous one.
4226 if activeaction:
4234 if activeaction:
4227 yield activeaction, blocklines
4235 yield activeaction, blocklines
4228
4236
4229 activeaction = line
4237 activeaction = line
4230 blocklines = []
4238 blocklines = []
4231 lastindent = 0
4239 lastindent = 0
4232 continue
4240 continue
4233
4241
4234 # Else we start with an indent.
4242 # Else we start with an indent.
4235
4243
4236 if not activeaction:
4244 if not activeaction:
4237 raise error.Abort(_(b'indented line outside of block'))
4245 raise error.Abort(_(b'indented line outside of block'))
4238
4246
4239 indent = len(line) - len(line.lstrip())
4247 indent = len(line) - len(line.lstrip())
4240
4248
4241 # If this line is indented more than the last line, concatenate it.
4249 # If this line is indented more than the last line, concatenate it.
4242 if indent > lastindent and blocklines:
4250 if indent > lastindent and blocklines:
4243 blocklines[-1] += line.lstrip()
4251 blocklines[-1] += line.lstrip()
4244 else:
4252 else:
4245 blocklines.append(line)
4253 blocklines.append(line)
4246 lastindent = indent
4254 lastindent = indent
4247
4255
4248 # Flush last block.
4256 # Flush last block.
4249 if activeaction:
4257 if activeaction:
4250 yield activeaction, blocklines
4258 yield activeaction, blocklines
4251
4259
4252
4260
4253 @command(
4261 @command(
4254 b'debugwireproto',
4262 b'debugwireproto',
4255 [
4263 [
4256 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4264 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4257 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4265 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4258 (
4266 (
4259 b'',
4267 b'',
4260 b'noreadstderr',
4268 b'noreadstderr',
4261 False,
4269 False,
4262 _(b'do not read from stderr of the remote'),
4270 _(b'do not read from stderr of the remote'),
4263 ),
4271 ),
4264 (
4272 (
4265 b'',
4273 b'',
4266 b'nologhandshake',
4274 b'nologhandshake',
4267 False,
4275 False,
4268 _(b'do not log I/O related to the peer handshake'),
4276 _(b'do not log I/O related to the peer handshake'),
4269 ),
4277 ),
4270 ]
4278 ]
4271 + cmdutil.remoteopts,
4279 + cmdutil.remoteopts,
4272 _(b'[PATH]'),
4280 _(b'[PATH]'),
4273 optionalrepo=True,
4281 optionalrepo=True,
4274 )
4282 )
4275 def debugwireproto(ui, repo, path=None, **opts):
4283 def debugwireproto(ui, repo, path=None, **opts):
4276 """send wire protocol commands to a server
4284 """send wire protocol commands to a server
4277
4285
4278 This command can be used to issue wire protocol commands to remote
4286 This command can be used to issue wire protocol commands to remote
4279 peers and to debug the raw data being exchanged.
4287 peers and to debug the raw data being exchanged.
4280
4288
4281 ``--localssh`` will start an SSH server against the current repository
4289 ``--localssh`` will start an SSH server against the current repository
4282 and connect to that. By default, the connection will perform a handshake
4290 and connect to that. By default, the connection will perform a handshake
4283 and establish an appropriate peer instance.
4291 and establish an appropriate peer instance.
4284
4292
4285 ``--peer`` can be used to bypass the handshake protocol and construct a
4293 ``--peer`` can be used to bypass the handshake protocol and construct a
4286 peer instance using the specified class type. Valid values are ``raw``,
4294 peer instance using the specified class type. Valid values are ``raw``,
4287 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4295 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4288 don't support higher-level command actions.
4296 don't support higher-level command actions.
4289
4297
4290 ``--noreadstderr`` can be used to disable automatic reading from stderr
4298 ``--noreadstderr`` can be used to disable automatic reading from stderr
4291 of the peer (for SSH connections only). Disabling automatic reading of
4299 of the peer (for SSH connections only). Disabling automatic reading of
4292 stderr is useful for making output more deterministic.
4300 stderr is useful for making output more deterministic.
4293
4301
4294 Commands are issued via a mini language which is specified via stdin.
4302 Commands are issued via a mini language which is specified via stdin.
4295 The language consists of individual actions to perform. An action is
4303 The language consists of individual actions to perform. An action is
4296 defined by a block. A block is defined as a line with no leading
4304 defined by a block. A block is defined as a line with no leading
4297 space followed by 0 or more lines with leading space. Blocks are
4305 space followed by 0 or more lines with leading space. Blocks are
4298 effectively a high-level command with additional metadata.
4306 effectively a high-level command with additional metadata.
4299
4307
4300 Lines beginning with ``#`` are ignored.
4308 Lines beginning with ``#`` are ignored.
4301
4309
4302 The following sections denote available actions.
4310 The following sections denote available actions.
4303
4311
4304 raw
4312 raw
4305 ---
4313 ---
4306
4314
4307 Send raw data to the server.
4315 Send raw data to the server.
4308
4316
4309 The block payload contains the raw data to send as one atomic send
4317 The block payload contains the raw data to send as one atomic send
4310 operation. The data may not actually be delivered in a single system
4318 operation. The data may not actually be delivered in a single system
4311 call: it depends on the abilities of the transport being used.
4319 call: it depends on the abilities of the transport being used.
4312
4320
4313 Each line in the block is de-indented and concatenated. Then, that
4321 Each line in the block is de-indented and concatenated. Then, that
4314 value is evaluated as a Python b'' literal. This allows the use of
4322 value is evaluated as a Python b'' literal. This allows the use of
4315 backslash escaping, etc.
4323 backslash escaping, etc.
4316
4324
4317 raw+
4325 raw+
4318 ----
4326 ----
4319
4327
4320 Behaves like ``raw`` except flushes output afterwards.
4328 Behaves like ``raw`` except flushes output afterwards.
4321
4329
4322 command <X>
4330 command <X>
4323 -----------
4331 -----------
4324
4332
4325 Send a request to run a named command, whose name follows the ``command``
4333 Send a request to run a named command, whose name follows the ``command``
4326 string.
4334 string.
4327
4335
4328 Arguments to the command are defined as lines in this block. The format of
4336 Arguments to the command are defined as lines in this block. The format of
4329 each line is ``<key> <value>``. e.g.::
4337 each line is ``<key> <value>``. e.g.::
4330
4338
4331 command listkeys
4339 command listkeys
4332 namespace bookmarks
4340 namespace bookmarks
4333
4341
4334 If the value begins with ``eval:``, it will be interpreted as a Python
4342 If the value begins with ``eval:``, it will be interpreted as a Python
4335 literal expression. Otherwise values are interpreted as Python b'' literals.
4343 literal expression. Otherwise values are interpreted as Python b'' literals.
4336 This allows sending complex types and encoding special byte sequences via
4344 This allows sending complex types and encoding special byte sequences via
4337 backslash escaping.
4345 backslash escaping.
4338
4346
4339 The following arguments have special meaning:
4347 The following arguments have special meaning:
4340
4348
4341 ``PUSHFILE``
4349 ``PUSHFILE``
4342 When defined, the *push* mechanism of the peer will be used instead
4350 When defined, the *push* mechanism of the peer will be used instead
4343 of the static request-response mechanism and the content of the
4351 of the static request-response mechanism and the content of the
4344 file specified in the value of this argument will be sent as the
4352 file specified in the value of this argument will be sent as the
4345 command payload.
4353 command payload.
4346
4354
4347 This can be used to submit a local bundle file to the remote.
4355 This can be used to submit a local bundle file to the remote.
4348
4356
4349 batchbegin
4357 batchbegin
4350 ----------
4358 ----------
4351
4359
4352 Instruct the peer to begin a batched send.
4360 Instruct the peer to begin a batched send.
4353
4361
4354 All ``command`` blocks are queued for execution until the next
4362 All ``command`` blocks are queued for execution until the next
4355 ``batchsubmit`` block.
4363 ``batchsubmit`` block.
4356
4364
4357 batchsubmit
4365 batchsubmit
4358 -----------
4366 -----------
4359
4367
4360 Submit previously queued ``command`` blocks as a batch request.
4368 Submit previously queued ``command`` blocks as a batch request.
4361
4369
4362 This action MUST be paired with a ``batchbegin`` action.
4370 This action MUST be paired with a ``batchbegin`` action.
4363
4371
4364 httprequest <method> <path>
4372 httprequest <method> <path>
4365 ---------------------------
4373 ---------------------------
4366
4374
4367 (HTTP peer only)
4375 (HTTP peer only)
4368
4376
4369 Send an HTTP request to the peer.
4377 Send an HTTP request to the peer.
4370
4378
4371 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4379 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4372
4380
4373 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4381 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4374 headers to add to the request. e.g. ``Accept: foo``.
4382 headers to add to the request. e.g. ``Accept: foo``.
4375
4383
4376 The following arguments are special:
4384 The following arguments are special:
4377
4385
4378 ``BODYFILE``
4386 ``BODYFILE``
4379 The content of the file defined as the value to this argument will be
4387 The content of the file defined as the value to this argument will be
4380 transferred verbatim as the HTTP request body.
4388 transferred verbatim as the HTTP request body.
4381
4389
4382 ``frame <type> <flags> <payload>``
4390 ``frame <type> <flags> <payload>``
4383 Send a unified protocol frame as part of the request body.
4391 Send a unified protocol frame as part of the request body.
4384
4392
4385 All frames will be collected and sent as the body to the HTTP
4393 All frames will be collected and sent as the body to the HTTP
4386 request.
4394 request.
4387
4395
4388 close
4396 close
4389 -----
4397 -----
4390
4398
4391 Close the connection to the server.
4399 Close the connection to the server.
4392
4400
4393 flush
4401 flush
4394 -----
4402 -----
4395
4403
4396 Flush data written to the server.
4404 Flush data written to the server.
4397
4405
4398 readavailable
4406 readavailable
4399 -------------
4407 -------------
4400
4408
4401 Close the write end of the connection and read all available data from
4409 Close the write end of the connection and read all available data from
4402 the server.
4410 the server.
4403
4411
4404 If the connection to the server encompasses multiple pipes, we poll both
4412 If the connection to the server encompasses multiple pipes, we poll both
4405 pipes and read available data.
4413 pipes and read available data.
4406
4414
4407 readline
4415 readline
4408 --------
4416 --------
4409
4417
4410 Read a line of output from the server. If there are multiple output
4418 Read a line of output from the server. If there are multiple output
4411 pipes, reads only the main pipe.
4419 pipes, reads only the main pipe.
4412
4420
4413 ereadline
4421 ereadline
4414 ---------
4422 ---------
4415
4423
4416 Like ``readline``, but read from the stderr pipe, if available.
4424 Like ``readline``, but read from the stderr pipe, if available.
4417
4425
4418 read <X>
4426 read <X>
4419 --------
4427 --------
4420
4428
4421 ``read()`` N bytes from the server's main output pipe.
4429 ``read()`` N bytes from the server's main output pipe.
4422
4430
4423 eread <X>
4431 eread <X>
4424 ---------
4432 ---------
4425
4433
4426 ``read()`` N bytes from the server's stderr pipe, if available.
4434 ``read()`` N bytes from the server's stderr pipe, if available.
4427
4435
4428 Specifying Unified Frame-Based Protocol Frames
4436 Specifying Unified Frame-Based Protocol Frames
4429 ----------------------------------------------
4437 ----------------------------------------------
4430
4438
4431 It is possible to emit *Unified Frame-Based Protocol* frames by using
4439 It is possible to emit *Unified Frame-Based Protocol* frames by using
4432 special syntax.
4440 special syntax.
4433
4441
4434 A frame is composed as a type, flags, and payload. These can be parsed
4442 A frame is composed as a type, flags, and payload. These can be parsed
4435 from a string of the form:
4443 from a string of the form:
4436
4444
4437 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4445 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4438
4446
4439 ``request-id`` and ``stream-id`` are integers defining the request and
4447 ``request-id`` and ``stream-id`` are integers defining the request and
4440 stream identifiers.
4448 stream identifiers.
4441
4449
4442 ``type`` can be an integer value for the frame type or the string name
4450 ``type`` can be an integer value for the frame type or the string name
4443 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4451 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4444 ``command-name``.
4452 ``command-name``.
4445
4453
4446 ``stream-flags`` and ``flags`` are ``|`` delimited lists of flag
4454 ``stream-flags`` and ``flags`` are ``|`` delimited lists of flag
4447 components. Each component (and there can be just one) can be an integer
4455 components. Each component (and there can be just one) can be an integer
4448 or a flag name for stream flags or frame flags, respectively. Values are
4456 or a flag name for stream flags or frame flags, respectively. Values are
4449 resolved to integers and then bitwise OR'd together.
4457 resolved to integers and then bitwise OR'd together.
4450
4458
4451 ``payload`` represents the raw frame payload. If it begins with
4459 ``payload`` represents the raw frame payload. If it begins with
4452 ``cbor:``, the following string is evaluated as Python code and the
4460 ``cbor:``, the following string is evaluated as Python code and the
4453 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4461 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4454 as a Python byte string literal.
4462 as a Python byte string literal.
4455 """
4463 """
4456 opts = pycompat.byteskwargs(opts)
4464 opts = pycompat.byteskwargs(opts)
4457
4465
4458 if opts[b'localssh'] and not repo:
4466 if opts[b'localssh'] and not repo:
4459 raise error.Abort(_(b'--localssh requires a repository'))
4467 raise error.Abort(_(b'--localssh requires a repository'))
4460
4468
4461 if opts[b'peer'] and opts[b'peer'] not in (
4469 if opts[b'peer'] and opts[b'peer'] not in (
4462 b'raw',
4470 b'raw',
4463 b'ssh1',
4471 b'ssh1',
4464 ):
4472 ):
4465 raise error.Abort(
4473 raise error.Abort(
4466 _(b'invalid value for --peer'),
4474 _(b'invalid value for --peer'),
4467 hint=_(b'valid values are "raw" and "ssh1"'),
4475 hint=_(b'valid values are "raw" and "ssh1"'),
4468 )
4476 )
4469
4477
4470 if path and opts[b'localssh']:
4478 if path and opts[b'localssh']:
4471 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4479 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4472
4480
4473 if ui.interactive():
4481 if ui.interactive():
4474 ui.write(_(b'(waiting for commands on stdin)\n'))
4482 ui.write(_(b'(waiting for commands on stdin)\n'))
4475
4483
4476 blocks = list(_parsewirelangblocks(ui.fin))
4484 blocks = list(_parsewirelangblocks(ui.fin))
4477
4485
4478 proc = None
4486 proc = None
4479 stdin = None
4487 stdin = None
4480 stdout = None
4488 stdout = None
4481 stderr = None
4489 stderr = None
4482 opener = None
4490 opener = None
4483
4491
4484 if opts[b'localssh']:
4492 if opts[b'localssh']:
4485 # We start the SSH server in its own process so there is process
4493 # We start the SSH server in its own process so there is process
4486 # separation. This prevents a whole class of potential bugs around
4494 # separation. This prevents a whole class of potential bugs around
4487 # shared state from interfering with server operation.
4495 # shared state from interfering with server operation.
4488 args = procutil.hgcmd() + [
4496 args = procutil.hgcmd() + [
4489 b'-R',
4497 b'-R',
4490 repo.root,
4498 repo.root,
4491 b'debugserve',
4499 b'debugserve',
4492 b'--sshstdio',
4500 b'--sshstdio',
4493 ]
4501 ]
4494 proc = subprocess.Popen(
4502 proc = subprocess.Popen(
4495 pycompat.rapply(procutil.tonativestr, args),
4503 pycompat.rapply(procutil.tonativestr, args),
4496 stdin=subprocess.PIPE,
4504 stdin=subprocess.PIPE,
4497 stdout=subprocess.PIPE,
4505 stdout=subprocess.PIPE,
4498 stderr=subprocess.PIPE,
4506 stderr=subprocess.PIPE,
4499 bufsize=0,
4507 bufsize=0,
4500 )
4508 )
4501
4509
4502 stdin = proc.stdin
4510 stdin = proc.stdin
4503 stdout = proc.stdout
4511 stdout = proc.stdout
4504 stderr = proc.stderr
4512 stderr = proc.stderr
4505
4513
4506 # We turn the pipes into observers so we can log I/O.
4514 # We turn the pipes into observers so we can log I/O.
4507 if ui.verbose or opts[b'peer'] == b'raw':
4515 if ui.verbose or opts[b'peer'] == b'raw':
4508 stdin = util.makeloggingfileobject(
4516 stdin = util.makeloggingfileobject(
4509 ui, proc.stdin, b'i', logdata=True
4517 ui, proc.stdin, b'i', logdata=True
4510 )
4518 )
4511 stdout = util.makeloggingfileobject(
4519 stdout = util.makeloggingfileobject(
4512 ui, proc.stdout, b'o', logdata=True
4520 ui, proc.stdout, b'o', logdata=True
4513 )
4521 )
4514 stderr = util.makeloggingfileobject(
4522 stderr = util.makeloggingfileobject(
4515 ui, proc.stderr, b'e', logdata=True
4523 ui, proc.stderr, b'e', logdata=True
4516 )
4524 )
4517
4525
4518 # --localssh also implies the peer connection settings.
4526 # --localssh also implies the peer connection settings.
4519
4527
4520 url = b'ssh://localserver'
4528 url = b'ssh://localserver'
4521 autoreadstderr = not opts[b'noreadstderr']
4529 autoreadstderr = not opts[b'noreadstderr']
4522
4530
4523 if opts[b'peer'] == b'ssh1':
4531 if opts[b'peer'] == b'ssh1':
4524 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4532 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4525 peer = sshpeer.sshv1peer(
4533 peer = sshpeer.sshv1peer(
4526 ui,
4534 ui,
4527 url,
4535 url,
4528 proc,
4536 proc,
4529 stdin,
4537 stdin,
4530 stdout,
4538 stdout,
4531 stderr,
4539 stderr,
4532 None,
4540 None,
4533 autoreadstderr=autoreadstderr,
4541 autoreadstderr=autoreadstderr,
4534 )
4542 )
4535 elif opts[b'peer'] == b'raw':
4543 elif opts[b'peer'] == b'raw':
4536 ui.write(_(b'using raw connection to peer\n'))
4544 ui.write(_(b'using raw connection to peer\n'))
4537 peer = None
4545 peer = None
4538 else:
4546 else:
4539 ui.write(_(b'creating ssh peer from handshake results\n'))
4547 ui.write(_(b'creating ssh peer from handshake results\n'))
4540 peer = sshpeer._make_peer(
4548 peer = sshpeer._make_peer(
4541 ui,
4549 ui,
4542 url,
4550 url,
4543 proc,
4551 proc,
4544 stdin,
4552 stdin,
4545 stdout,
4553 stdout,
4546 stderr,
4554 stderr,
4547 autoreadstderr=autoreadstderr,
4555 autoreadstderr=autoreadstderr,
4548 )
4556 )
4549
4557
4550 elif path:
4558 elif path:
4551 # We bypass hg.peer() so we can proxy the sockets.
4559 # We bypass hg.peer() so we can proxy the sockets.
4552 # TODO consider not doing this because we skip
4560 # TODO consider not doing this because we skip
4553 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4561 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4554 u = urlutil.url(path)
4562 u = urlutil.url(path)
4555 if u.scheme != b'http':
4563 if u.scheme != b'http':
4556 raise error.Abort(_(b'only http:// paths are currently supported'))
4564 raise error.Abort(_(b'only http:// paths are currently supported'))
4557
4565
4558 url, authinfo = u.authinfo()
4566 url, authinfo = u.authinfo()
4559 openerargs = {
4567 openerargs = {
4560 'useragent': b'Mercurial debugwireproto',
4568 'useragent': b'Mercurial debugwireproto',
4561 }
4569 }
4562
4570
4563 # Turn pipes/sockets into observers so we can log I/O.
4571 # Turn pipes/sockets into observers so we can log I/O.
4564 if ui.verbose:
4572 if ui.verbose:
4565 openerargs.update(
4573 openerargs.update(
4566 {
4574 {
4567 'loggingfh': ui,
4575 'loggingfh': ui,
4568 'loggingname': b's',
4576 'loggingname': b's',
4569 'loggingopts': {
4577 'loggingopts': {
4570 'logdata': True,
4578 'logdata': True,
4571 'logdataapis': False,
4579 'logdataapis': False,
4572 },
4580 },
4573 }
4581 }
4574 )
4582 )
4575
4583
4576 if ui.debugflag:
4584 if ui.debugflag:
4577 openerargs['loggingopts']['logdataapis'] = True
4585 openerargs['loggingopts']['logdataapis'] = True
4578
4586
4579 # Don't send default headers when in raw mode. This allows us to
4587 # Don't send default headers when in raw mode. This allows us to
4580 # bypass most of the behavior of our URL handling code so we can
4588 # bypass most of the behavior of our URL handling code so we can
4581 # have near complete control over what's sent on the wire.
4589 # have near complete control over what's sent on the wire.
4582 if opts[b'peer'] == b'raw':
4590 if opts[b'peer'] == b'raw':
4583 openerargs['sendaccept'] = False
4591 openerargs['sendaccept'] = False
4584
4592
4585 opener = urlmod.opener(ui, authinfo, **openerargs)
4593 opener = urlmod.opener(ui, authinfo, **openerargs)
4586
4594
4587 if opts[b'peer'] == b'raw':
4595 if opts[b'peer'] == b'raw':
4588 ui.write(_(b'using raw connection to peer\n'))
4596 ui.write(_(b'using raw connection to peer\n'))
4589 peer = None
4597 peer = None
4590 elif opts[b'peer']:
4598 elif opts[b'peer']:
4591 raise error.Abort(
4599 raise error.Abort(
4592 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4600 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4593 )
4601 )
4594 else:
4602 else:
4595 peer_path = urlutil.try_path(ui, path)
4603 peer_path = urlutil.try_path(ui, path)
4596 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4604 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4597
4605
4598 # We /could/ populate stdin/stdout with sock.makefile()...
4606 # We /could/ populate stdin/stdout with sock.makefile()...
4599 else:
4607 else:
4600 raise error.Abort(_(b'unsupported connection configuration'))
4608 raise error.Abort(_(b'unsupported connection configuration'))
4601
4609
4602 batchedcommands = None
4610 batchedcommands = None
4603
4611
4604 # Now perform actions based on the parsed wire language instructions.
4612 # Now perform actions based on the parsed wire language instructions.
4605 for action, lines in blocks:
4613 for action, lines in blocks:
4606 if action in (b'raw', b'raw+'):
4614 if action in (b'raw', b'raw+'):
4607 if not stdin:
4615 if not stdin:
4608 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4616 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4609
4617
4610 # Concatenate the data together.
4618 # Concatenate the data together.
4611 data = b''.join(l.lstrip() for l in lines)
4619 data = b''.join(l.lstrip() for l in lines)
4612 data = stringutil.unescapestr(data)
4620 data = stringutil.unescapestr(data)
4613 stdin.write(data)
4621 stdin.write(data)
4614
4622
4615 if action == b'raw+':
4623 if action == b'raw+':
4616 stdin.flush()
4624 stdin.flush()
4617 elif action == b'flush':
4625 elif action == b'flush':
4618 if not stdin:
4626 if not stdin:
4619 raise error.Abort(_(b'cannot call flush on this peer'))
4627 raise error.Abort(_(b'cannot call flush on this peer'))
4620 stdin.flush()
4628 stdin.flush()
4621 elif action.startswith(b'command'):
4629 elif action.startswith(b'command'):
4622 if not peer:
4630 if not peer:
4623 raise error.Abort(
4631 raise error.Abort(
4624 _(
4632 _(
4625 b'cannot send commands unless peer instance '
4633 b'cannot send commands unless peer instance '
4626 b'is available'
4634 b'is available'
4627 )
4635 )
4628 )
4636 )
4629
4637
4630 command = action.split(b' ', 1)[1]
4638 command = action.split(b' ', 1)[1]
4631
4639
4632 args = {}
4640 args = {}
4633 for line in lines:
4641 for line in lines:
4634 # We need to allow empty values.
4642 # We need to allow empty values.
4635 fields = line.lstrip().split(b' ', 1)
4643 fields = line.lstrip().split(b' ', 1)
4636 if len(fields) == 1:
4644 if len(fields) == 1:
4637 key = fields[0]
4645 key = fields[0]
4638 value = b''
4646 value = b''
4639 else:
4647 else:
4640 key, value = fields
4648 key, value = fields
4641
4649
4642 if value.startswith(b'eval:'):
4650 if value.startswith(b'eval:'):
4643 value = stringutil.evalpythonliteral(value[5:])
4651 value = stringutil.evalpythonliteral(value[5:])
4644 else:
4652 else:
4645 value = stringutil.unescapestr(value)
4653 value = stringutil.unescapestr(value)
4646
4654
4647 args[key] = value
4655 args[key] = value
4648
4656
4649 if batchedcommands is not None:
4657 if batchedcommands is not None:
4650 batchedcommands.append((command, args))
4658 batchedcommands.append((command, args))
4651 continue
4659 continue
4652
4660
4653 ui.status(_(b'sending %s command\n') % command)
4661 ui.status(_(b'sending %s command\n') % command)
4654
4662
4655 if b'PUSHFILE' in args:
4663 if b'PUSHFILE' in args:
4656 with open(args[b'PUSHFILE'], 'rb') as fh:
4664 with open(args[b'PUSHFILE'], 'rb') as fh:
4657 del args[b'PUSHFILE']
4665 del args[b'PUSHFILE']
4658 res, output = peer._callpush(
4666 res, output = peer._callpush(
4659 command, fh, **pycompat.strkwargs(args)
4667 command, fh, **pycompat.strkwargs(args)
4660 )
4668 )
4661 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4669 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4662 ui.status(
4670 ui.status(
4663 _(b'remote output: %s\n') % stringutil.escapestr(output)
4671 _(b'remote output: %s\n') % stringutil.escapestr(output)
4664 )
4672 )
4665 else:
4673 else:
4666 with peer.commandexecutor() as e:
4674 with peer.commandexecutor() as e:
4667 res = e.callcommand(command, args).result()
4675 res = e.callcommand(command, args).result()
4668
4676
4669 ui.status(
4677 ui.status(
4670 _(b'response: %s\n')
4678 _(b'response: %s\n')
4671 % stringutil.pprint(res, bprefix=True, indent=2)
4679 % stringutil.pprint(res, bprefix=True, indent=2)
4672 )
4680 )
4673
4681
4674 elif action == b'batchbegin':
4682 elif action == b'batchbegin':
4675 if batchedcommands is not None:
4683 if batchedcommands is not None:
4676 raise error.Abort(_(b'nested batchbegin not allowed'))
4684 raise error.Abort(_(b'nested batchbegin not allowed'))
4677
4685
4678 batchedcommands = []
4686 batchedcommands = []
4679 elif action == b'batchsubmit':
4687 elif action == b'batchsubmit':
4680 # There is a batching API we could go through. But it would be
4688 # There is a batching API we could go through. But it would be
4681 # difficult to normalize requests into function calls. It is easier
4689 # difficult to normalize requests into function calls. It is easier
4682 # to bypass this layer and normalize to commands + args.
4690 # to bypass this layer and normalize to commands + args.
4683 ui.status(
4691 ui.status(
4684 _(b'sending batch with %d sub-commands\n')
4692 _(b'sending batch with %d sub-commands\n')
4685 % len(batchedcommands)
4693 % len(batchedcommands)
4686 )
4694 )
4687 assert peer is not None
4695 assert peer is not None
4688 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4696 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4689 ui.status(
4697 ui.status(
4690 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4698 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4691 )
4699 )
4692
4700
4693 batchedcommands = None
4701 batchedcommands = None
4694
4702
4695 elif action.startswith(b'httprequest '):
4703 elif action.startswith(b'httprequest '):
4696 if not opener:
4704 if not opener:
4697 raise error.Abort(
4705 raise error.Abort(
4698 _(b'cannot use httprequest without an HTTP peer')
4706 _(b'cannot use httprequest without an HTTP peer')
4699 )
4707 )
4700
4708
4701 request = action.split(b' ', 2)
4709 request = action.split(b' ', 2)
4702 if len(request) != 3:
4710 if len(request) != 3:
4703 raise error.Abort(
4711 raise error.Abort(
4704 _(
4712 _(
4705 b'invalid httprequest: expected format is '
4713 b'invalid httprequest: expected format is '
4706 b'"httprequest <method> <path>"'
4714 b'"httprequest <method> <path>"'
4707 )
4715 )
4708 )
4716 )
4709
4717
4710 method, httppath = request[1:]
4718 method, httppath = request[1:]
4711 headers = {}
4719 headers = {}
4712 body = None
4720 body = None
4713 frames = []
4721 frames = []
4714 for line in lines:
4722 for line in lines:
4715 line = line.lstrip()
4723 line = line.lstrip()
4716 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4724 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4717 if m:
4725 if m:
4718 # Headers need to use native strings.
4726 # Headers need to use native strings.
4719 key = pycompat.strurl(m.group(1))
4727 key = pycompat.strurl(m.group(1))
4720 value = pycompat.strurl(m.group(2))
4728 value = pycompat.strurl(m.group(2))
4721 headers[key] = value
4729 headers[key] = value
4722 continue
4730 continue
4723
4731
4724 if line.startswith(b'BODYFILE '):
4732 if line.startswith(b'BODYFILE '):
4725 with open(line.split(b' ', 1)[1], b'rb') as fh:
4733 with open(line.split(b' ', 1)[1], b'rb') as fh:
4726 body = fh.read()
4734 body = fh.read()
4727 elif line.startswith(b'frame '):
4735 elif line.startswith(b'frame '):
4728 frame = wireprotoframing.makeframefromhumanstring(
4736 frame = wireprotoframing.makeframefromhumanstring(
4729 line[len(b'frame ') :]
4737 line[len(b'frame ') :]
4730 )
4738 )
4731
4739
4732 frames.append(frame)
4740 frames.append(frame)
4733 else:
4741 else:
4734 raise error.Abort(
4742 raise error.Abort(
4735 _(b'unknown argument to httprequest: %s') % line
4743 _(b'unknown argument to httprequest: %s') % line
4736 )
4744 )
4737
4745
4738 url = path + httppath
4746 url = path + httppath
4739
4747
4740 if frames:
4748 if frames:
4741 body = b''.join(bytes(f) for f in frames)
4749 body = b''.join(bytes(f) for f in frames)
4742
4750
4743 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4751 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4744
4752
4745 # urllib.Request insists on using has_data() as a proxy for
4753 # urllib.Request insists on using has_data() as a proxy for
4746 # determining the request method. Override that to use our
4754 # determining the request method. Override that to use our
4747 # explicitly requested method.
4755 # explicitly requested method.
4748 req.get_method = lambda: pycompat.sysstr(method)
4756 req.get_method = lambda: pycompat.sysstr(method)
4749
4757
4750 try:
4758 try:
4751 res = opener.open(req)
4759 res = opener.open(req)
4752 body = res.read()
4760 body = res.read()
4753 except util.urlerr.urlerror as e:
4761 except util.urlerr.urlerror as e:
4754 # read() method must be called, but only exists in Python 2
4762 # read() method must be called, but only exists in Python 2
4755 getattr(e, 'read', lambda: None)()
4763 getattr(e, 'read', lambda: None)()
4756 continue
4764 continue
4757
4765
4758 ct = res.headers.get('Content-Type')
4766 ct = res.headers.get('Content-Type')
4759 if ct == 'application/mercurial-cbor':
4767 if ct == 'application/mercurial-cbor':
4760 ui.write(
4768 ui.write(
4761 _(b'cbor> %s\n')
4769 _(b'cbor> %s\n')
4762 % stringutil.pprint(
4770 % stringutil.pprint(
4763 cborutil.decodeall(body), bprefix=True, indent=2
4771 cborutil.decodeall(body), bprefix=True, indent=2
4764 )
4772 )
4765 )
4773 )
4766
4774
4767 elif action == b'close':
4775 elif action == b'close':
4768 assert peer is not None
4776 assert peer is not None
4769 peer.close()
4777 peer.close()
4770 elif action == b'readavailable':
4778 elif action == b'readavailable':
4771 if not stdout or not stderr:
4779 if not stdout or not stderr:
4772 raise error.Abort(
4780 raise error.Abort(
4773 _(b'readavailable not available on this peer')
4781 _(b'readavailable not available on this peer')
4774 )
4782 )
4775
4783
4776 stdin.close()
4784 stdin.close()
4777 stdout.read()
4785 stdout.read()
4778 stderr.read()
4786 stderr.read()
4779
4787
4780 elif action == b'readline':
4788 elif action == b'readline':
4781 if not stdout:
4789 if not stdout:
4782 raise error.Abort(_(b'readline not available on this peer'))
4790 raise error.Abort(_(b'readline not available on this peer'))
4783 stdout.readline()
4791 stdout.readline()
4784 elif action == b'ereadline':
4792 elif action == b'ereadline':
4785 if not stderr:
4793 if not stderr:
4786 raise error.Abort(_(b'ereadline not available on this peer'))
4794 raise error.Abort(_(b'ereadline not available on this peer'))
4787 stderr.readline()
4795 stderr.readline()
4788 elif action.startswith(b'read '):
4796 elif action.startswith(b'read '):
4789 count = int(action.split(b' ', 1)[1])
4797 count = int(action.split(b' ', 1)[1])
4790 if not stdout:
4798 if not stdout:
4791 raise error.Abort(_(b'read not available on this peer'))
4799 raise error.Abort(_(b'read not available on this peer'))
4792 stdout.read(count)
4800 stdout.read(count)
4793 elif action.startswith(b'eread '):
4801 elif action.startswith(b'eread '):
4794 count = int(action.split(b' ', 1)[1])
4802 count = int(action.split(b' ', 1)[1])
4795 if not stderr:
4803 if not stderr:
4796 raise error.Abort(_(b'eread not available on this peer'))
4804 raise error.Abort(_(b'eread not available on this peer'))
4797 stderr.read(count)
4805 stderr.read(count)
4798 else:
4806 else:
4799 raise error.Abort(_(b'unknown action: %s') % action)
4807 raise error.Abort(_(b'unknown action: %s') % action)
4800
4808
4801 if batchedcommands is not None:
4809 if batchedcommands is not None:
4802 raise error.Abort(_(b'unclosed "batchbegin" request'))
4810 raise error.Abort(_(b'unclosed "batchbegin" request'))
4803
4811
4804 if peer:
4812 if peer:
4805 peer.close()
4813 peer.close()
4806
4814
4807 if proc:
4815 if proc:
4808 proc.kill()
4816 proc.kill()
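The httprequest handling above leans on a urllib quirk that the inline comment calls out: Request decides between GET and POST by checking whether a body is attached, so the code overrides get_method to force whatever method the script actually asked for. A minimal standalone sketch of that pattern, using only the standard library against a hypothetical endpoint (the helper name and URL are illustrative, not Mercurial's own wrappers):

    import urllib.request

    def build_request(url, method, body=None, headers=None):
        # urllib would normally infer GET (no body) or POST (body present).
        req = urllib.request.Request(url, data=body, headers=headers or {})
        # Force the explicitly requested method, mirroring the
        # req.get_method override in the code above.
        req.get_method = lambda: method
        return req

    req = build_request('https://example.invalid/api', 'PUT', body=b'payload')
    print(req.get_method())  # PUT, even though urllib would have guessed POST

On Python 3 the same effect can also be obtained by passing method= to Request; the attribute override shown here simply mirrors what the code above does.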
@@ -1,453 +1,453 b''
1 Show all commands except debug commands
1 Show all commands except debug commands
2 $ hg debugcomplete
2 $ hg debugcomplete
3 abort
3 abort
4 add
4 add
5 addremove
5 addremove
6 annotate
6 annotate
7 archive
7 archive
8 backout
8 backout
9 bisect
9 bisect
10 bookmarks
10 bookmarks
11 branch
11 branch
12 branches
12 branches
13 bundle
13 bundle
14 cat
14 cat
15 clone
15 clone
16 commit
16 commit
17 config
17 config
18 continue
18 continue
19 copy
19 copy
20 diff
20 diff
21 export
21 export
22 files
22 files
23 forget
23 forget
24 graft
24 graft
25 grep
25 grep
26 heads
26 heads
27 help
27 help
28 identify
28 identify
29 import
29 import
30 incoming
30 incoming
31 init
31 init
32 locate
32 locate
33 log
33 log
34 manifest
34 manifest
35 merge
35 merge
36 outgoing
36 outgoing
37 parents
37 parents
38 paths
38 paths
39 phase
39 phase
40 pull
40 pull
41 purge
41 purge
42 push
42 push
43 recover
43 recover
44 remove
44 remove
45 rename
45 rename
46 resolve
46 resolve
47 revert
47 revert
48 rollback
48 rollback
49 root
49 root
50 serve
50 serve
51 shelve
51 shelve
52 status
52 status
53 summary
53 summary
54 tag
54 tag
55 tags
55 tags
56 tip
56 tip
57 unbundle
57 unbundle
58 unshelve
58 unshelve
59 update
59 update
60 verify
60 verify
61 version
61 version
62
62
63 Show all commands that start with "a"
63 Show all commands that start with "a"
64 $ hg debugcomplete a
64 $ hg debugcomplete a
65 abort
65 abort
66 add
66 add
67 addremove
67 addremove
68 annotate
68 annotate
69 archive
69 archive
70
70
71 Do not show debug commands if there are other candidates
71 Do not show debug commands if there are other candidates
72 $ hg debugcomplete d
72 $ hg debugcomplete d
73 diff
73 diff
74
74
75 Show debug commands if there are no other candidates
75 Show debug commands if there are no other candidates
76 $ hg debugcomplete debug
76 $ hg debugcomplete debug
77 debug-delta-find
77 debug-delta-find
78 debug-repair-issue6528
78 debug-repair-issue6528
79 debug-revlog-index
79 debug-revlog-index
80 debug-revlog-stats
80 debug-revlog-stats
81 debug::stable-tail-sort
81 debug::stable-tail-sort
82 debugancestor
82 debugancestor
83 debugantivirusrunning
83 debugantivirusrunning
84 debugapplystreamclonebundle
84 debugapplystreamclonebundle
85 debugbackupbundle
85 debugbackupbundle
86 debugbuilddag
86 debugbuilddag
87 debugbundle
87 debugbundle
88 debugcapabilities
88 debugcapabilities
89 debugchangedfiles
89 debugchangedfiles
90 debugcheckstate
90 debugcheckstate
91 debugcolor
91 debugcolor
92 debugcommands
92 debugcommands
93 debugcomplete
93 debugcomplete
94 debugconfig
94 debugconfig
95 debugcreatestreamclonebundle
95 debugcreatestreamclonebundle
96 debugdag
96 debugdag
97 debugdata
97 debugdata
98 debugdate
98 debugdate
99 debugdeltachain
99 debugdeltachain
100 debugdirstate
100 debugdirstate
101 debugdirstateignorepatternshash
101 debugdirstateignorepatternshash
102 debugdiscovery
102 debugdiscovery
103 debugdownload
103 debugdownload
104 debugextensions
104 debugextensions
105 debugfileset
105 debugfileset
106 debugformat
106 debugformat
107 debugfsinfo
107 debugfsinfo
108 debuggetbundle
108 debuggetbundle
109 debugignore
109 debugignore
110 debugindexdot
110 debugindexdot
111 debugindexstats
111 debugindexstats
112 debuginstall
112 debuginstall
113 debugknown
113 debugknown
114 debuglabelcomplete
114 debuglabelcomplete
115 debuglocks
115 debuglocks
116 debugmanifestfulltextcache
116 debugmanifestfulltextcache
117 debugmergestate
117 debugmergestate
118 debugnamecomplete
118 debugnamecomplete
119 debugnodemap
119 debugnodemap
120 debugobsolete
120 debugobsolete
121 debugp1copies
121 debugp1copies
122 debugp2copies
122 debugp2copies
123 debugpathcomplete
123 debugpathcomplete
124 debugpathcopies
124 debugpathcopies
125 debugpeer
125 debugpeer
126 debugpickmergetool
126 debugpickmergetool
127 debugpushkey
127 debugpushkey
128 debugpvec
128 debugpvec
129 debugrebuilddirstate
129 debugrebuilddirstate
130 debugrebuildfncache
130 debugrebuildfncache
131 debugrename
131 debugrename
132 debugrequires
132 debugrequires
133 debugrevlog
133 debugrevlog
134 debugrevlogindex
134 debugrevlogindex
135 debugrevspec
135 debugrevspec
136 debugserve
136 debugserve
137 debugsetparents
137 debugsetparents
138 debugshell
138 debugshell
139 debugsidedata
139 debugsidedata
140 debugssl
140 debugssl
141 debugstrip
141 debugstrip
142 debugsub
142 debugsub
143 debugsuccessorssets
143 debugsuccessorssets
144 debugtagscache
144 debugtagscache
145 debugtemplate
145 debugtemplate
146 debuguigetpass
146 debuguigetpass
147 debuguiprompt
147 debuguiprompt
148 debugupdatecaches
148 debugupdatecaches
149 debugupgraderepo
149 debugupgraderepo
150 debugwalk
150 debugwalk
151 debugwhyunstable
151 debugwhyunstable
152 debugwireargs
152 debugwireargs
153 debugwireproto
153 debugwireproto
154
154
155 Do not show the alias of a debug command if there are other candidates
155 Do not show the alias of a debug command if there are other candidates
156 (this should hide rawcommit)
156 (this should hide rawcommit)
157 $ hg debugcomplete r
157 $ hg debugcomplete r
158 recover
158 recover
159 remove
159 remove
160 rename
160 rename
161 resolve
161 resolve
162 revert
162 revert
163 rollback
163 rollback
164 root
164 root
165 Show the alias of a debug command if there are no other candidates
165 Show the alias of a debug command if there are no other candidates
166 $ hg debugcomplete rawc
166 $ hg debugcomplete rawc
167
167
168
168
169 Show the global options
169 Show the global options
170 $ hg debugcomplete --options | sort
170 $ hg debugcomplete --options | sort
171 --color
171 --color
172 --config
172 --config
173 --cwd
173 --cwd
174 --debug
174 --debug
175 --debugger
175 --debugger
176 --encoding
176 --encoding
177 --encodingmode
177 --encodingmode
178 --help
178 --help
179 --hidden
179 --hidden
180 --noninteractive
180 --noninteractive
181 --pager
181 --pager
182 --profile
182 --profile
183 --quiet
183 --quiet
184 --repository
184 --repository
185 --time
185 --time
186 --traceback
186 --traceback
187 --verbose
187 --verbose
188 --version
188 --version
189 -R
189 -R
190 -h
190 -h
191 -q
191 -q
192 -v
192 -v
193 -y
193 -y
194
194
195 Show the options for the "serve" command
195 Show the options for the "serve" command
196 $ hg debugcomplete --options serve | sort
196 $ hg debugcomplete --options serve | sort
197 --accesslog
197 --accesslog
198 --address
198 --address
199 --certificate
199 --certificate
200 --cmdserver
200 --cmdserver
201 --color
201 --color
202 --config
202 --config
203 --cwd
203 --cwd
204 --daemon
204 --daemon
205 --daemon-postexec
205 --daemon-postexec
206 --debug
206 --debug
207 --debugger
207 --debugger
208 --encoding
208 --encoding
209 --encodingmode
209 --encodingmode
210 --errorlog
210 --errorlog
211 --help
211 --help
212 --hidden
212 --hidden
213 --ipv6
213 --ipv6
214 --name
214 --name
215 --noninteractive
215 --noninteractive
216 --pager
216 --pager
217 --pid-file
217 --pid-file
218 --port
218 --port
219 --prefix
219 --prefix
220 --print-url
220 --print-url
221 --profile
221 --profile
222 --quiet
222 --quiet
223 --repository
223 --repository
224 --stdio
224 --stdio
225 --style
225 --style
226 --subrepos
226 --subrepos
227 --templates
227 --templates
228 --time
228 --time
229 --traceback
229 --traceback
230 --verbose
230 --verbose
231 --version
231 --version
232 --web-conf
232 --web-conf
233 -6
233 -6
234 -A
234 -A
235 -E
235 -E
236 -R
236 -R
237 -S
237 -S
238 -a
238 -a
239 -d
239 -d
240 -h
240 -h
241 -n
241 -n
242 -p
242 -p
243 -q
243 -q
244 -t
244 -t
245 -v
245 -v
246 -y
246 -y
247
247
248 Show an error if we use --options with an ambiguous abbreviation
248 Show an error if we use --options with an ambiguous abbreviation
249 $ hg debugcomplete --options s
249 $ hg debugcomplete --options s
250 hg: command 's' is ambiguous:
250 hg: command 's' is ambiguous:
251 serve shelve showconfig status summary
251 serve shelve showconfig status summary
252 [10]
252 [10]
253
253
254 Show all commands + options
254 Show all commands + options
255 $ hg debugcommands
255 $ hg debugcommands
256 abort: dry-run
256 abort: dry-run
257 add: include, exclude, subrepos, dry-run
257 add: include, exclude, subrepos, dry-run
258 addremove: similarity, subrepos, include, exclude, dry-run
258 addremove: similarity, subrepos, include, exclude, dry-run
259 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
259 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
260 archive: no-decode, prefix, rev, type, subrepos, include, exclude
260 archive: no-decode, prefix, rev, type, subrepos, include, exclude
261 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
261 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
262 bisect: reset, good, bad, skip, extend, command, noupdate
262 bisect: reset, good, bad, skip, extend, command, noupdate
263 bookmarks: force, rev, delete, rename, inactive, list, template
263 bookmarks: force, rev, delete, rename, inactive, list, template
264 branch: force, clean, rev
264 branch: force, clean, rev
265 branches: active, closed, rev, template
265 branches: active, closed, rev, template
266 bundle: exact, force, rev, branch, base, all, type, ssh, remotecmd, insecure
266 bundle: exact, force, rev, branch, base, all, type, ssh, remotecmd, insecure
267 cat: output, rev, decode, include, exclude, template
267 cat: output, rev, decode, include, exclude, template
268 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
268 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
269 commit: addremove, close-branch, amend, secret, draft, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
269 commit: addremove, close-branch, amend, secret, draft, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
270 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
270 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
271 continue: dry-run
271 continue: dry-run
272 copy: forget, after, at-rev, force, include, exclude, dry-run
272 copy: forget, after, at-rev, force, include, exclude, dry-run
273 debug-delta-find: changelog, manifest, dir, template, source
273 debug-delta-find: changelog, manifest, dir, template, source
274 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
274 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
275 debug-revlog-index: changelog, manifest, dir, template
275 debug-revlog-index: changelog, manifest, dir, template
276 debug-revlog-stats: changelog, manifest, filelogs, template
276 debug-revlog-stats: changelog, manifest, filelogs, template
277 debug::stable-tail-sort: template
277 debug::stable-tail-sort: template
278 debugancestor:
278 debugancestor:
279 debugantivirusrunning:
279 debugantivirusrunning:
280 debugapplystreamclonebundle:
280 debugapplystreamclonebundle:
281 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
281 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
282 debugbuilddag: mergeable-file, overwritten-file, new-file, from-existing
282 debugbuilddag: mergeable-file, overwritten-file, new-file, from-existing
283 debugbundle: all, part-type, spec
283 debugbundle: all, part-type, spec
284 debugcapabilities:
284 debugcapabilities:
285 debugchangedfiles: compute
285 debugchangedfiles: compute
286 debugcheckstate:
286 debugcheckstate:
287 debugcolor: style
287 debugcolor: style
288 debugcommands:
288 debugcommands:
289 debugcomplete: options
289 debugcomplete: options
290 debugcreatestreamclonebundle:
290 debugcreatestreamclonebundle:
291 debugdag: tags, branches, dots, spaces
291 debugdag: tags, branches, dots, spaces
292 debugdata: changelog, manifest, dir
292 debugdata: changelog, manifest, dir
293 debugdate: extended
293 debugdate: extended
294 debugdeltachain: changelog, manifest, dir, template
294 debugdeltachain: changelog, manifest, dir, template
295 debugdirstateignorepatternshash:
295 debugdirstateignorepatternshash:
296 debugdirstate: nodates, dates, datesort, docket, all
296 debugdirstate: nodates, dates, datesort, docket, all
297 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
297 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
298 debugdownload: output
298 debugdownload: output
299 debugextensions: template
299 debugextensions: template
300 debugfileset: rev, all-files, show-matcher, show-stage
300 debugfileset: rev, all-files, show-matcher, show-stage
301 debugformat: template
301 debugformat: template
302 debugfsinfo:
302 debugfsinfo:
303 debuggetbundle: head, common, type
303 debuggetbundle: head, common, type
304 debugignore:
304 debugignore:
305 debugindexdot: changelog, manifest, dir
305 debugindexdot: changelog, manifest, dir
306 debugindexstats:
306 debugindexstats:
307 debuginstall: template
307 debuginstall: template
308 debugknown:
308 debugknown:
309 debuglabelcomplete:
309 debuglabelcomplete:
310 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
310 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
311 debugmanifestfulltextcache: clear, add
311 debugmanifestfulltextcache: clear, add
312 debugmergestate: style, template
312 debugmergestate: style, template
313 debugnamecomplete:
313 debugnamecomplete:
314 debugnodemap: dump-new, dump-disk, check, metadata
314 debugnodemap: changelog, manifest, dir, dump-new, dump-disk, check, metadata
315 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
315 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
316 debugp1copies: rev
316 debugp1copies: rev
317 debugp2copies: rev
317 debugp2copies: rev
318 debugpathcomplete: full, normal, added, removed
318 debugpathcomplete: full, normal, added, removed
319 debugpathcopies: include, exclude
319 debugpathcopies: include, exclude
320 debugpeer:
320 debugpeer:
321 debugpickmergetool: rev, changedelete, include, exclude, tool
321 debugpickmergetool: rev, changedelete, include, exclude, tool
322 debugpushkey:
322 debugpushkey:
323 debugpvec:
323 debugpvec:
324 debugrebuilddirstate: rev, minimal
324 debugrebuilddirstate: rev, minimal
325 debugrebuildfncache: only-data
325 debugrebuildfncache: only-data
326 debugrename: rev
326 debugrename: rev
327 debugrequires:
327 debugrequires:
328 debugrevlog: changelog, manifest, dir, dump
328 debugrevlog: changelog, manifest, dir, dump
329 debugrevlogindex: changelog, manifest, dir, format
329 debugrevlogindex: changelog, manifest, dir, format
330 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
330 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
331 debugserve: sshstdio, logiofd, logiofile
331 debugserve: sshstdio, logiofd, logiofile
332 debugsetparents:
332 debugsetparents:
333 debugshell: command
333 debugshell: command
334 debugsidedata: changelog, manifest, dir
334 debugsidedata: changelog, manifest, dir
335 debugssl:
335 debugssl:
336 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
336 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
337 debugsub: rev
337 debugsub: rev
338 debugsuccessorssets: closest
338 debugsuccessorssets: closest
339 debugtagscache:
339 debugtagscache:
340 debugtemplate: rev, define
340 debugtemplate: rev, define
341 debuguigetpass: prompt
341 debuguigetpass: prompt
342 debuguiprompt: prompt
342 debuguiprompt: prompt
343 debugupdatecaches:
343 debugupdatecaches:
344 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
344 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
345 debugwalk: include, exclude
345 debugwalk: include, exclude
346 debugwhyunstable:
346 debugwhyunstable:
347 debugwireargs: three, four, five, ssh, remotecmd, insecure
347 debugwireargs: three, four, five, ssh, remotecmd, insecure
348 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
348 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
349 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
349 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
350 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
350 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
351 files: rev, print0, include, exclude, template, subrepos
351 files: rev, print0, include, exclude, template, subrepos
352 forget: interactive, include, exclude, dry-run
352 forget: interactive, include, exclude, dry-run
353 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
353 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
354 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
354 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
355 heads: rev, topo, active, closed, style, template
355 heads: rev, topo, active, closed, style, template
356 help: extension, command, keyword, system
356 help: extension, command, keyword, system
357 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
357 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
358 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
358 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
359 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
359 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
360 init: ssh, remotecmd, insecure
360 init: ssh, remotecmd, insecure
361 locate: rev, print0, fullpath, include, exclude
361 locate: rev, print0, fullpath, include, exclude
362 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
362 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
363 manifest: rev, all, template
363 manifest: rev, all, template
364 merge: force, rev, preview, abort, tool
364 merge: force, rev, preview, abort, tool
365 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
365 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
366 parents: rev, style, template
366 parents: rev, style, template
367 paths: template
367 paths: template
368 phase: public, draft, secret, force, rev
368 phase: public, draft, secret, force, rev
369 pull: update, force, confirm, rev, bookmark, branch, remote-hidden, ssh, remotecmd, insecure
369 pull: update, force, confirm, rev, bookmark, branch, remote-hidden, ssh, remotecmd, insecure
370 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
370 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
371 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
371 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
372 recover: verify
372 recover: verify
373 remove: after, force, subrepos, include, exclude, dry-run
373 remove: after, force, subrepos, include, exclude, dry-run
374 rename: forget, after, at-rev, force, include, exclude, dry-run
374 rename: forget, after, at-rev, force, include, exclude, dry-run
375 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
375 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
376 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
376 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
377 rollback: dry-run, force
377 rollback: dry-run, force
378 root: template
378 root: template
379 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
379 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
380 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
380 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
381 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
381 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
382 summary: remote
382 summary: remote
383 tag: force, local, rev, remove, edit, message, date, user
383 tag: force, local, rev, remove, edit, message, date, user
384 tags: template
384 tags: template
385 tip: patch, git, style, template
385 tip: patch, git, style, template
386 unbundle: update
386 unbundle: update
387 unshelve: abort, continue, interactive, keep, name, tool, date
387 unshelve: abort, continue, interactive, keep, name, tool, date
388 update: clean, check, merge, date, rev, tool
388 update: clean, check, merge, date, rev, tool
389 verify: full
389 verify: full
390 version: template
390 version: template
391
391
392 $ hg init a
392 $ hg init a
393 $ cd a
393 $ cd a
394 $ echo fee > fee
394 $ echo fee > fee
395 $ hg ci -q -Amfee
395 $ hg ci -q -Amfee
396 $ hg tag fee
396 $ hg tag fee
397 $ mkdir fie
397 $ mkdir fie
398 $ echo dead > fie/dead
398 $ echo dead > fie/dead
399 $ echo live > fie/live
399 $ echo live > fie/live
400 $ hg bookmark fo
400 $ hg bookmark fo
401 $ hg branch -q fie
401 $ hg branch -q fie
402 $ hg ci -q -Amfie
402 $ hg ci -q -Amfie
403 $ echo fo > fo
403 $ echo fo > fo
404 $ hg branch -qf default
404 $ hg branch -qf default
405 $ hg ci -q -Amfo
405 $ hg ci -q -Amfo
406 $ echo Fum > Fum
406 $ echo Fum > Fum
407 $ hg ci -q -AmFum
407 $ hg ci -q -AmFum
408 $ hg bookmark Fum
408 $ hg bookmark Fum
409
409
410 Test debugpathcomplete
410 Test debugpathcomplete
411
411
412 $ hg debugpathcomplete f
412 $ hg debugpathcomplete f
413 fee
413 fee
414 fie
414 fie
415 fo
415 fo
416 $ hg debugpathcomplete -f f
416 $ hg debugpathcomplete -f f
417 fee
417 fee
418 fie/dead
418 fie/dead
419 fie/live
419 fie/live
420 fo
420 fo
421
421
422 $ hg rm Fum
422 $ hg rm Fum
423 $ hg debugpathcomplete -r F
423 $ hg debugpathcomplete -r F
424 Fum
424 Fum
425
425
426 Test debugnamecomplete
426 Test debugnamecomplete
427
427
428 $ hg debugnamecomplete
428 $ hg debugnamecomplete
429 Fum
429 Fum
430 default
430 default
431 fee
431 fee
432 fie
432 fie
433 fo
433 fo
434 tip
434 tip
435 $ hg debugnamecomplete f
435 $ hg debugnamecomplete f
436 fee
436 fee
437 fie
437 fie
438 fo
438 fo
439
439
440 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
440 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
441 used for completions in some shells.
441 used for completions in some shells.
442
442
443 $ hg debuglabelcomplete
443 $ hg debuglabelcomplete
444 Fum
444 Fum
445 default
445 default
446 fee
446 fee
447 fie
447 fie
448 fo
448 fo
449 tip
449 tip
450 $ hg debuglabelcomplete f
450 $ hg debuglabelcomplete f
451 fee
451 fee
452 fie
452 fie
453 fo
453 fo
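The one substantive change in the option listing above is the debugnodemap line, which now lists changelog, manifest and dir alongside its existing flags: the same revlog selectors that debugdata and debugdeltachain already expose.

The rest of the test file exercises the completion rules stated in its comments: debug commands are hidden whenever a non-debug command also matches the prefix, and shown only when nothing else does. A small self-contained sketch of that rule, simplified for illustration and not taken from Mercurial's implementation (the helper name and command list are hypothetical):

    def complete(prefix, commands):
        # All commands matching the prefix, but hide debug* commands whenever
        # a non-debug command also matches (simplified model of the behaviour
        # the tests above check; not the real implementation).
        matches = sorted(c for c in commands if c.startswith(prefix))
        nondebug = [c for c in matches if not c.startswith('debug')]
        return nondebug or matches

    cmds = ['add', 'diff', 'debugdata', 'debugdirstate', 'recover', 'remove']
    print(complete('d', cmds))      # ['diff'] -- debug commands stay hidden
    print(complete('debug', cmds))  # ['debugdata', 'debugdirstate']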