##// END OF EJS Templates
stream-clone: update debugcreatestreamclonebundle helps...
marmoute -
r51537:89556caf default
parent child Browse files
Show More
@@ -1,4852 +1,4856 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 getattr,
36 getattr,
37 open,
37 open,
38 )
38 )
39 from . import (
39 from . import (
40 bundle2,
40 bundle2,
41 bundlerepo,
41 bundlerepo,
42 changegroup,
42 changegroup,
43 cmdutil,
43 cmdutil,
44 color,
44 color,
45 context,
45 context,
46 copies,
46 copies,
47 dagparser,
47 dagparser,
48 dirstateutils,
48 dirstateutils,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filelog,
53 filelog,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 manifest,
62 manifest,
63 mergestate as mergestatemod,
63 mergestate as mergestatemod,
64 metadata,
64 metadata,
65 obsolete,
65 obsolete,
66 obsutil,
66 obsutil,
67 pathutil,
67 pathutil,
68 phases,
68 phases,
69 policy,
69 policy,
70 pvec,
70 pvec,
71 pycompat,
71 pycompat,
72 registrar,
72 registrar,
73 repair,
73 repair,
74 repoview,
74 repoview,
75 requirements,
75 requirements,
76 revlog,
76 revlog,
77 revset,
77 revset,
78 revsetlang,
78 revsetlang,
79 scmutil,
79 scmutil,
80 setdiscovery,
80 setdiscovery,
81 simplemerge,
81 simplemerge,
82 sshpeer,
82 sshpeer,
83 sslutil,
83 sslutil,
84 streamclone,
84 streamclone,
85 strip,
85 strip,
86 tags as tagsmod,
86 tags as tagsmod,
87 templater,
87 templater,
88 treediscovery,
88 treediscovery,
89 upgrade,
89 upgrade,
90 url as urlmod,
90 url as urlmod,
91 util,
91 util,
92 verify,
92 verify,
93 vfs as vfsmod,
93 vfs as vfsmod,
94 wireprotoframing,
94 wireprotoframing,
95 wireprotoserver,
95 wireprotoserver,
96 )
96 )
97 from .interfaces import repository
97 from .interfaces import repository
98 from .stabletailgraph import stabletailsort
98 from .stabletailgraph import stabletailsort
99 from .utils import (
99 from .utils import (
100 cborutil,
100 cborutil,
101 compression,
101 compression,
102 dateutil,
102 dateutil,
103 procutil,
103 procutil,
104 stringutil,
104 stringutil,
105 urlutil,
105 urlutil,
106 )
106 )
107
107
108 from .revlogutils import (
108 from .revlogutils import (
109 constants as revlog_constants,
109 constants as revlog_constants,
110 debug as revlog_debug,
110 debug as revlog_debug,
111 deltas as deltautil,
111 deltas as deltautil,
112 nodemap,
112 nodemap,
113 rewrite,
113 rewrite,
114 sidedata,
114 sidedata,
115 )
115 )
116
116
117 release = lockmod.release
117 release = lockmod.release
118
118
119 table = {}
119 table = {}
120 table.update(strip.command._table)
120 table.update(strip.command._table)
121 command = registrar.command(table)
121 command = registrar.command(table)
122
122
123
123
124 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
124 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
125 def debugancestor(ui, repo, *args):
125 def debugancestor(ui, repo, *args):
126 """find the ancestor revision of two revisions in a given index"""
126 """find the ancestor revision of two revisions in a given index"""
127 if len(args) == 3:
127 if len(args) == 3:
128 index, rev1, rev2 = args
128 index, rev1, rev2 = args
129 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
129 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
130 lookup = r.lookup
130 lookup = r.lookup
131 elif len(args) == 2:
131 elif len(args) == 2:
132 if not repo:
132 if not repo:
133 raise error.Abort(
133 raise error.Abort(
134 _(b'there is no Mercurial repository here (.hg not found)')
134 _(b'there is no Mercurial repository here (.hg not found)')
135 )
135 )
136 rev1, rev2 = args
136 rev1, rev2 = args
137 r = repo.changelog
137 r = repo.changelog
138 lookup = repo.lookup
138 lookup = repo.lookup
139 else:
139 else:
140 raise error.Abort(_(b'either two or three arguments required'))
140 raise error.Abort(_(b'either two or three arguments required'))
141 a = r.ancestor(lookup(rev1), lookup(rev2))
141 a = r.ancestor(lookup(rev1), lookup(rev2))
142 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
142 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
143
143
144
144
145 @command(b'debugantivirusrunning', [])
145 @command(b'debugantivirusrunning', [])
146 def debugantivirusrunning(ui, repo):
146 def debugantivirusrunning(ui, repo):
147 """attempt to trigger an antivirus scanner to see if one is active"""
147 """attempt to trigger an antivirus scanner to see if one is active"""
148 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
148 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
149 f.write(
149 f.write(
150 util.b85decode(
150 util.b85decode(
151 # This is a base85-armored version of the EICAR test file. See
151 # This is a base85-armored version of the EICAR test file. See
152 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
152 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
153 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
153 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
154 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
154 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
155 )
155 )
156 )
156 )
157 # Give an AV engine time to scan the file.
157 # Give an AV engine time to scan the file.
158 time.sleep(2)
158 time.sleep(2)
159 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
159 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
160
160
161
161
162 @command(b'debugapplystreamclonebundle', [], b'FILE')
162 @command(b'debugapplystreamclonebundle', [], b'FILE')
163 def debugapplystreamclonebundle(ui, repo, fname):
163 def debugapplystreamclonebundle(ui, repo, fname):
164 """apply a stream clone bundle file"""
164 """apply a stream clone bundle file"""
165 f = hg.openpath(ui, fname)
165 f = hg.openpath(ui, fname)
166 gen = exchange.readbundle(ui, f, fname)
166 gen = exchange.readbundle(ui, f, fname)
167 gen.apply(repo)
167 gen.apply(repo)
168
168
169
169
170 @command(
170 @command(
171 b'debugbuilddag',
171 b'debugbuilddag',
172 [
172 [
173 (
173 (
174 b'm',
174 b'm',
175 b'mergeable-file',
175 b'mergeable-file',
176 None,
176 None,
177 _(b'add single file mergeable changes'),
177 _(b'add single file mergeable changes'),
178 ),
178 ),
179 (
179 (
180 b'o',
180 b'o',
181 b'overwritten-file',
181 b'overwritten-file',
182 None,
182 None,
183 _(b'add single file all revs overwrite'),
183 _(b'add single file all revs overwrite'),
184 ),
184 ),
185 (b'n', b'new-file', None, _(b'add new file at each rev')),
185 (b'n', b'new-file', None, _(b'add new file at each rev')),
186 (
186 (
187 b'',
187 b'',
188 b'from-existing',
188 b'from-existing',
189 None,
189 None,
190 _(b'continue from a non-empty repository'),
190 _(b'continue from a non-empty repository'),
191 ),
191 ),
192 ],
192 ],
193 _(b'[OPTION]... [TEXT]'),
193 _(b'[OPTION]... [TEXT]'),
194 )
194 )
195 def debugbuilddag(
195 def debugbuilddag(
196 ui,
196 ui,
197 repo,
197 repo,
198 text=None,
198 text=None,
199 mergeable_file=False,
199 mergeable_file=False,
200 overwritten_file=False,
200 overwritten_file=False,
201 new_file=False,
201 new_file=False,
202 from_existing=False,
202 from_existing=False,
203 ):
203 ):
204 """builds a repo with a given DAG from scratch in the current empty repo
204 """builds a repo with a given DAG from scratch in the current empty repo
205
205
206 The description of the DAG is read from stdin if not given on the
206 The description of the DAG is read from stdin if not given on the
207 command line.
207 command line.
208
208
209 Elements:
209 Elements:
210
210
211 - "+n" is a linear run of n nodes based on the current default parent
211 - "+n" is a linear run of n nodes based on the current default parent
212 - "." is a single node based on the current default parent
212 - "." is a single node based on the current default parent
213 - "$" resets the default parent to null (implied at the start);
213 - "$" resets the default parent to null (implied at the start);
214 otherwise the default parent is always the last node created
214 otherwise the default parent is always the last node created
215 - "<p" sets the default parent to the backref p
215 - "<p" sets the default parent to the backref p
216 - "*p" is a fork at parent p, which is a backref
216 - "*p" is a fork at parent p, which is a backref
217 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
217 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
218 - "/p2" is a merge of the preceding node and p2
218 - "/p2" is a merge of the preceding node and p2
219 - ":tag" defines a local tag for the preceding node
219 - ":tag" defines a local tag for the preceding node
220 - "@branch" sets the named branch for subsequent nodes
220 - "@branch" sets the named branch for subsequent nodes
221 - "#...\\n" is a comment up to the end of the line
221 - "#...\\n" is a comment up to the end of the line
222
222
223 Whitespace between the above elements is ignored.
223 Whitespace between the above elements is ignored.
224
224
225 A backref is either
225 A backref is either
226
226
227 - a number n, which references the node curr-n, where curr is the current
227 - a number n, which references the node curr-n, where curr is the current
228 node, or
228 node, or
229 - the name of a local tag you placed earlier using ":tag", or
229 - the name of a local tag you placed earlier using ":tag", or
230 - empty to denote the default parent.
230 - empty to denote the default parent.
231
231
232 All string valued-elements are either strictly alphanumeric, or must
232 All string valued-elements are either strictly alphanumeric, or must
233 be enclosed in double quotes ("..."), with "\\" as escape character.
233 be enclosed in double quotes ("..."), with "\\" as escape character.
234 """
234 """
235
235
236 if text is None:
236 if text is None:
237 ui.status(_(b"reading DAG from stdin\n"))
237 ui.status(_(b"reading DAG from stdin\n"))
238 text = ui.fin.read()
238 text = ui.fin.read()
239
239
240 cl = repo.changelog
240 cl = repo.changelog
241 if len(cl) > 0 and not from_existing:
241 if len(cl) > 0 and not from_existing:
242 raise error.Abort(_(b'repository is not empty'))
242 raise error.Abort(_(b'repository is not empty'))
243
243
244 # determine number of revs in DAG
244 # determine number of revs in DAG
245 total = 0
245 total = 0
246 for type, data in dagparser.parsedag(text):
246 for type, data in dagparser.parsedag(text):
247 if type == b'n':
247 if type == b'n':
248 total += 1
248 total += 1
249
249
250 if mergeable_file:
250 if mergeable_file:
251 linesperrev = 2
251 linesperrev = 2
252 # make a file with k lines per rev
252 # make a file with k lines per rev
253 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
253 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
254 initialmergedlines.append(b"")
254 initialmergedlines.append(b"")
255
255
256 tags = []
256 tags = []
257 progress = ui.makeprogress(
257 progress = ui.makeprogress(
258 _(b'building'), unit=_(b'revisions'), total=total
258 _(b'building'), unit=_(b'revisions'), total=total
259 )
259 )
260 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
260 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
261 at = -1
261 at = -1
262 atbranch = b'default'
262 atbranch = b'default'
263 nodeids = []
263 nodeids = []
264 id = 0
264 id = 0
265 progress.update(id)
265 progress.update(id)
266 for type, data in dagparser.parsedag(text):
266 for type, data in dagparser.parsedag(text):
267 if type == b'n':
267 if type == b'n':
268 ui.note((b'node %s\n' % pycompat.bytestr(data)))
268 ui.note((b'node %s\n' % pycompat.bytestr(data)))
269 id, ps = data
269 id, ps = data
270
270
271 files = []
271 files = []
272 filecontent = {}
272 filecontent = {}
273
273
274 p2 = None
274 p2 = None
275 if mergeable_file:
275 if mergeable_file:
276 fn = b"mf"
276 fn = b"mf"
277 p1 = repo[ps[0]]
277 p1 = repo[ps[0]]
278 if len(ps) > 1:
278 if len(ps) > 1:
279 p2 = repo[ps[1]]
279 p2 = repo[ps[1]]
280 pa = p1.ancestor(p2)
280 pa = p1.ancestor(p2)
281 base, local, other = [
281 base, local, other = [
282 x[fn].data() for x in (pa, p1, p2)
282 x[fn].data() for x in (pa, p1, p2)
283 ]
283 ]
284 m3 = simplemerge.Merge3Text(base, local, other)
284 m3 = simplemerge.Merge3Text(base, local, other)
285 ml = [
285 ml = [
286 l.strip()
286 l.strip()
287 for l in simplemerge.render_minimized(m3)[0]
287 for l in simplemerge.render_minimized(m3)[0]
288 ]
288 ]
289 ml.append(b"")
289 ml.append(b"")
290 elif at > 0:
290 elif at > 0:
291 ml = p1[fn].data().split(b"\n")
291 ml = p1[fn].data().split(b"\n")
292 else:
292 else:
293 ml = initialmergedlines
293 ml = initialmergedlines
294 ml[id * linesperrev] += b" r%i" % id
294 ml[id * linesperrev] += b" r%i" % id
295 mergedtext = b"\n".join(ml)
295 mergedtext = b"\n".join(ml)
296 files.append(fn)
296 files.append(fn)
297 filecontent[fn] = mergedtext
297 filecontent[fn] = mergedtext
298
298
299 if overwritten_file:
299 if overwritten_file:
300 fn = b"of"
300 fn = b"of"
301 files.append(fn)
301 files.append(fn)
302 filecontent[fn] = b"r%i\n" % id
302 filecontent[fn] = b"r%i\n" % id
303
303
304 if new_file:
304 if new_file:
305 fn = b"nf%i" % id
305 fn = b"nf%i" % id
306 files.append(fn)
306 files.append(fn)
307 filecontent[fn] = b"r%i\n" % id
307 filecontent[fn] = b"r%i\n" % id
308 if len(ps) > 1:
308 if len(ps) > 1:
309 if not p2:
309 if not p2:
310 p2 = repo[ps[1]]
310 p2 = repo[ps[1]]
311 for fn in p2:
311 for fn in p2:
312 if fn.startswith(b"nf"):
312 if fn.startswith(b"nf"):
313 files.append(fn)
313 files.append(fn)
314 filecontent[fn] = p2[fn].data()
314 filecontent[fn] = p2[fn].data()
315
315
316 def fctxfn(repo, cx, path):
316 def fctxfn(repo, cx, path):
317 if path in filecontent:
317 if path in filecontent:
318 return context.memfilectx(
318 return context.memfilectx(
319 repo, cx, path, filecontent[path]
319 repo, cx, path, filecontent[path]
320 )
320 )
321 return None
321 return None
322
322
323 if len(ps) == 0 or ps[0] < 0:
323 if len(ps) == 0 or ps[0] < 0:
324 pars = [None, None]
324 pars = [None, None]
325 elif len(ps) == 1:
325 elif len(ps) == 1:
326 pars = [nodeids[ps[0]], None]
326 pars = [nodeids[ps[0]], None]
327 else:
327 else:
328 pars = [nodeids[p] for p in ps]
328 pars = [nodeids[p] for p in ps]
329 cx = context.memctx(
329 cx = context.memctx(
330 repo,
330 repo,
331 pars,
331 pars,
332 b"r%i" % id,
332 b"r%i" % id,
333 files,
333 files,
334 fctxfn,
334 fctxfn,
335 date=(id, 0),
335 date=(id, 0),
336 user=b"debugbuilddag",
336 user=b"debugbuilddag",
337 extra={b'branch': atbranch},
337 extra={b'branch': atbranch},
338 )
338 )
339 nodeid = repo.commitctx(cx)
339 nodeid = repo.commitctx(cx)
340 nodeids.append(nodeid)
340 nodeids.append(nodeid)
341 at = id
341 at = id
342 elif type == b'l':
342 elif type == b'l':
343 id, name = data
343 id, name = data
344 ui.note((b'tag %s\n' % name))
344 ui.note((b'tag %s\n' % name))
345 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
345 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
346 elif type == b'a':
346 elif type == b'a':
347 ui.note((b'branch %s\n' % data))
347 ui.note((b'branch %s\n' % data))
348 atbranch = data
348 atbranch = data
349 progress.update(id)
349 progress.update(id)
350
350
351 if tags:
351 if tags:
352 repo.vfs.write(b"localtags", b"".join(tags))
352 repo.vfs.write(b"localtags", b"".join(tags))
353
353
354
354
355 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
355 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
356 indent_string = b' ' * indent
356 indent_string = b' ' * indent
357 if all:
357 if all:
358 ui.writenoi18n(
358 ui.writenoi18n(
359 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
359 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
360 % indent_string
360 % indent_string
361 )
361 )
362
362
363 def showchunks(named):
363 def showchunks(named):
364 ui.write(b"\n%s%s\n" % (indent_string, named))
364 ui.write(b"\n%s%s\n" % (indent_string, named))
365 for deltadata in gen.deltaiter():
365 for deltadata in gen.deltaiter():
366 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
366 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
367 ui.write(
367 ui.write(
368 b"%s%s %s %s %s %s %d\n"
368 b"%s%s %s %s %s %s %d\n"
369 % (
369 % (
370 indent_string,
370 indent_string,
371 hex(node),
371 hex(node),
372 hex(p1),
372 hex(p1),
373 hex(p2),
373 hex(p2),
374 hex(cs),
374 hex(cs),
375 hex(deltabase),
375 hex(deltabase),
376 len(delta),
376 len(delta),
377 )
377 )
378 )
378 )
379
379
380 gen.changelogheader()
380 gen.changelogheader()
381 showchunks(b"changelog")
381 showchunks(b"changelog")
382 gen.manifestheader()
382 gen.manifestheader()
383 showchunks(b"manifest")
383 showchunks(b"manifest")
384 for chunkdata in iter(gen.filelogheader, {}):
384 for chunkdata in iter(gen.filelogheader, {}):
385 fname = chunkdata[b'filename']
385 fname = chunkdata[b'filename']
386 showchunks(fname)
386 showchunks(fname)
387 else:
387 else:
388 if isinstance(gen, bundle2.unbundle20):
388 if isinstance(gen, bundle2.unbundle20):
389 raise error.Abort(_(b'use debugbundle2 for this file'))
389 raise error.Abort(_(b'use debugbundle2 for this file'))
390 gen.changelogheader()
390 gen.changelogheader()
391 for deltadata in gen.deltaiter():
391 for deltadata in gen.deltaiter():
392 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
392 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
393 ui.write(b"%s%s\n" % (indent_string, hex(node)))
393 ui.write(b"%s%s\n" % (indent_string, hex(node)))
394
394
395
395
396 def _debugobsmarkers(ui, part, indent=0, **opts):
396 def _debugobsmarkers(ui, part, indent=0, **opts):
397 """display version and markers contained in 'data'"""
397 """display version and markers contained in 'data'"""
398 opts = pycompat.byteskwargs(opts)
398 opts = pycompat.byteskwargs(opts)
399 data = part.read()
399 data = part.read()
400 indent_string = b' ' * indent
400 indent_string = b' ' * indent
401 try:
401 try:
402 version, markers = obsolete._readmarkers(data)
402 version, markers = obsolete._readmarkers(data)
403 except error.UnknownVersion as exc:
403 except error.UnknownVersion as exc:
404 msg = b"%sunsupported version: %s (%d bytes)\n"
404 msg = b"%sunsupported version: %s (%d bytes)\n"
405 msg %= indent_string, exc.version, len(data)
405 msg %= indent_string, exc.version, len(data)
406 ui.write(msg)
406 ui.write(msg)
407 else:
407 else:
408 msg = b"%sversion: %d (%d bytes)\n"
408 msg = b"%sversion: %d (%d bytes)\n"
409 msg %= indent_string, version, len(data)
409 msg %= indent_string, version, len(data)
410 ui.write(msg)
410 ui.write(msg)
411 fm = ui.formatter(b'debugobsolete', opts)
411 fm = ui.formatter(b'debugobsolete', opts)
412 for rawmarker in sorted(markers):
412 for rawmarker in sorted(markers):
413 m = obsutil.marker(None, rawmarker)
413 m = obsutil.marker(None, rawmarker)
414 fm.startitem()
414 fm.startitem()
415 fm.plain(indent_string)
415 fm.plain(indent_string)
416 cmdutil.showmarker(fm, m)
416 cmdutil.showmarker(fm, m)
417 fm.end()
417 fm.end()
418
418
419
419
420 def _debugphaseheads(ui, data, indent=0):
420 def _debugphaseheads(ui, data, indent=0):
421 """display version and markers contained in 'data'"""
421 """display version and markers contained in 'data'"""
422 indent_string = b' ' * indent
422 indent_string = b' ' * indent
423 headsbyphase = phases.binarydecode(data)
423 headsbyphase = phases.binarydecode(data)
424 for phase in phases.allphases:
424 for phase in phases.allphases:
425 for head in headsbyphase[phase]:
425 for head in headsbyphase[phase]:
426 ui.write(indent_string)
426 ui.write(indent_string)
427 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
427 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
428
428
429
429
430 def _quasirepr(thing):
430 def _quasirepr(thing):
431 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
431 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
432 return b'{%s}' % (
432 return b'{%s}' % (
433 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
433 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
434 )
434 )
435 return pycompat.bytestr(repr(thing))
435 return pycompat.bytestr(repr(thing))
436
436
437
437
438 def _debugbundle2(ui, gen, all=None, **opts):
438 def _debugbundle2(ui, gen, all=None, **opts):
439 """lists the contents of a bundle2"""
439 """lists the contents of a bundle2"""
440 if not isinstance(gen, bundle2.unbundle20):
440 if not isinstance(gen, bundle2.unbundle20):
441 raise error.Abort(_(b'not a bundle2 file'))
441 raise error.Abort(_(b'not a bundle2 file'))
442 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
442 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
443 parttypes = opts.get('part_type', [])
443 parttypes = opts.get('part_type', [])
444 for part in gen.iterparts():
444 for part in gen.iterparts():
445 if parttypes and part.type not in parttypes:
445 if parttypes and part.type not in parttypes:
446 continue
446 continue
447 msg = b'%s -- %s (mandatory: %r)\n'
447 msg = b'%s -- %s (mandatory: %r)\n'
448 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
448 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
449 if part.type == b'changegroup':
449 if part.type == b'changegroup':
450 version = part.params.get(b'version', b'01')
450 version = part.params.get(b'version', b'01')
451 cg = changegroup.getunbundler(version, part, b'UN')
451 cg = changegroup.getunbundler(version, part, b'UN')
452 if not ui.quiet:
452 if not ui.quiet:
453 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
453 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
454 if part.type == b'obsmarkers':
454 if part.type == b'obsmarkers':
455 if not ui.quiet:
455 if not ui.quiet:
456 _debugobsmarkers(ui, part, indent=4, **opts)
456 _debugobsmarkers(ui, part, indent=4, **opts)
457 if part.type == b'phase-heads':
457 if part.type == b'phase-heads':
458 if not ui.quiet:
458 if not ui.quiet:
459 _debugphaseheads(ui, part, indent=4)
459 _debugphaseheads(ui, part, indent=4)
460
460
461
461
462 @command(
462 @command(
463 b'debugbundle',
463 b'debugbundle',
464 [
464 [
465 (b'a', b'all', None, _(b'show all details')),
465 (b'a', b'all', None, _(b'show all details')),
466 (b'', b'part-type', [], _(b'show only the named part type')),
466 (b'', b'part-type', [], _(b'show only the named part type')),
467 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
467 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
468 ],
468 ],
469 _(b'FILE'),
469 _(b'FILE'),
470 norepo=True,
470 norepo=True,
471 )
471 )
472 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
472 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
473 """lists the contents of a bundle"""
473 """lists the contents of a bundle"""
474 with hg.openpath(ui, bundlepath) as f:
474 with hg.openpath(ui, bundlepath) as f:
475 if spec:
475 if spec:
476 spec = exchange.getbundlespec(ui, f)
476 spec = exchange.getbundlespec(ui, f)
477 ui.write(b'%s\n' % spec)
477 ui.write(b'%s\n' % spec)
478 return
478 return
479
479
480 gen = exchange.readbundle(ui, f, bundlepath)
480 gen = exchange.readbundle(ui, f, bundlepath)
481 if isinstance(gen, bundle2.unbundle20):
481 if isinstance(gen, bundle2.unbundle20):
482 return _debugbundle2(ui, gen, all=all, **opts)
482 return _debugbundle2(ui, gen, all=all, **opts)
483 _debugchangegroup(ui, gen, all=all, **opts)
483 _debugchangegroup(ui, gen, all=all, **opts)
484
484
485
485
486 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
486 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
487 def debugcapabilities(ui, path, **opts):
487 def debugcapabilities(ui, path, **opts):
488 """lists the capabilities of a remote peer"""
488 """lists the capabilities of a remote peer"""
489 opts = pycompat.byteskwargs(opts)
489 opts = pycompat.byteskwargs(opts)
490 peer = hg.peer(ui, opts, path)
490 peer = hg.peer(ui, opts, path)
491 try:
491 try:
492 caps = peer.capabilities()
492 caps = peer.capabilities()
493 ui.writenoi18n(b'Main capabilities:\n')
493 ui.writenoi18n(b'Main capabilities:\n')
494 for c in sorted(caps):
494 for c in sorted(caps):
495 ui.write(b' %s\n' % c)
495 ui.write(b' %s\n' % c)
496 b2caps = bundle2.bundle2caps(peer)
496 b2caps = bundle2.bundle2caps(peer)
497 if b2caps:
497 if b2caps:
498 ui.writenoi18n(b'Bundle2 capabilities:\n')
498 ui.writenoi18n(b'Bundle2 capabilities:\n')
499 for key, values in sorted(b2caps.items()):
499 for key, values in sorted(b2caps.items()):
500 ui.write(b' %s\n' % key)
500 ui.write(b' %s\n' % key)
501 for v in values:
501 for v in values:
502 ui.write(b' %s\n' % v)
502 ui.write(b' %s\n' % v)
503 finally:
503 finally:
504 peer.close()
504 peer.close()
505
505
506
506
507 @command(
507 @command(
508 b'debugchangedfiles',
508 b'debugchangedfiles',
509 [
509 [
510 (
510 (
511 b'',
511 b'',
512 b'compute',
512 b'compute',
513 False,
513 False,
514 b"compute information instead of reading it from storage",
514 b"compute information instead of reading it from storage",
515 ),
515 ),
516 ],
516 ],
517 b'REV',
517 b'REV',
518 )
518 )
519 def debugchangedfiles(ui, repo, rev, **opts):
519 def debugchangedfiles(ui, repo, rev, **opts):
520 """list the stored files changes for a revision"""
520 """list the stored files changes for a revision"""
521 ctx = logcmdutil.revsingle(repo, rev, None)
521 ctx = logcmdutil.revsingle(repo, rev, None)
522 files = None
522 files = None
523
523
524 if opts['compute']:
524 if opts['compute']:
525 files = metadata.compute_all_files_changes(ctx)
525 files = metadata.compute_all_files_changes(ctx)
526 else:
526 else:
527 sd = repo.changelog.sidedata(ctx.rev())
527 sd = repo.changelog.sidedata(ctx.rev())
528 files_block = sd.get(sidedata.SD_FILES)
528 files_block = sd.get(sidedata.SD_FILES)
529 if files_block is not None:
529 if files_block is not None:
530 files = metadata.decode_files_sidedata(sd)
530 files = metadata.decode_files_sidedata(sd)
531 if files is not None:
531 if files is not None:
532 for f in sorted(files.touched):
532 for f in sorted(files.touched):
533 if f in files.added:
533 if f in files.added:
534 action = b"added"
534 action = b"added"
535 elif f in files.removed:
535 elif f in files.removed:
536 action = b"removed"
536 action = b"removed"
537 elif f in files.merged:
537 elif f in files.merged:
538 action = b"merged"
538 action = b"merged"
539 elif f in files.salvaged:
539 elif f in files.salvaged:
540 action = b"salvaged"
540 action = b"salvaged"
541 else:
541 else:
542 action = b"touched"
542 action = b"touched"
543
543
544 copy_parent = b""
544 copy_parent = b""
545 copy_source = b""
545 copy_source = b""
546 if f in files.copied_from_p1:
546 if f in files.copied_from_p1:
547 copy_parent = b"p1"
547 copy_parent = b"p1"
548 copy_source = files.copied_from_p1[f]
548 copy_source = files.copied_from_p1[f]
549 elif f in files.copied_from_p2:
549 elif f in files.copied_from_p2:
550 copy_parent = b"p2"
550 copy_parent = b"p2"
551 copy_source = files.copied_from_p2[f]
551 copy_source = files.copied_from_p2[f]
552
552
553 data = (action, copy_parent, f, copy_source)
553 data = (action, copy_parent, f, copy_source)
554 template = b"%-8s %2s: %s, %s;\n"
554 template = b"%-8s %2s: %s, %s;\n"
555 ui.write(template % data)
555 ui.write(template % data)
556
556
557
557
558 @command(b'debugcheckstate', [], b'')
558 @command(b'debugcheckstate', [], b'')
559 def debugcheckstate(ui, repo):
559 def debugcheckstate(ui, repo):
560 """validate the correctness of the current dirstate"""
560 """validate the correctness of the current dirstate"""
561 errors = verify.verifier(repo)._verify_dirstate()
561 errors = verify.verifier(repo)._verify_dirstate()
562 if errors:
562 if errors:
563 errstr = _(b"dirstate inconsistent with current parent's manifest")
563 errstr = _(b"dirstate inconsistent with current parent's manifest")
564 raise error.Abort(errstr)
564 raise error.Abort(errstr)
565
565
566
566
567 @command(
567 @command(
568 b'debugcolor',
568 b'debugcolor',
569 [(b'', b'style', None, _(b'show all configured styles'))],
569 [(b'', b'style', None, _(b'show all configured styles'))],
570 b'hg debugcolor',
570 b'hg debugcolor',
571 )
571 )
572 def debugcolor(ui, repo, **opts):
572 def debugcolor(ui, repo, **opts):
573 """show available color, effects or style"""
573 """show available color, effects or style"""
574 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
574 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
575 if opts.get('style'):
575 if opts.get('style'):
576 return _debugdisplaystyle(ui)
576 return _debugdisplaystyle(ui)
577 else:
577 else:
578 return _debugdisplaycolor(ui)
578 return _debugdisplaycolor(ui)
579
579
580
580
581 def _debugdisplaycolor(ui):
581 def _debugdisplaycolor(ui):
582 ui = ui.copy()
582 ui = ui.copy()
583 ui._styles.clear()
583 ui._styles.clear()
584 for effect in color._activeeffects(ui).keys():
584 for effect in color._activeeffects(ui).keys():
585 ui._styles[effect] = effect
585 ui._styles[effect] = effect
586 if ui._terminfoparams:
586 if ui._terminfoparams:
587 for k, v in ui.configitems(b'color'):
587 for k, v in ui.configitems(b'color'):
588 if k.startswith(b'color.'):
588 if k.startswith(b'color.'):
589 ui._styles[k] = k[6:]
589 ui._styles[k] = k[6:]
590 elif k.startswith(b'terminfo.'):
590 elif k.startswith(b'terminfo.'):
591 ui._styles[k] = k[9:]
591 ui._styles[k] = k[9:]
592 ui.write(_(b'available colors:\n'))
592 ui.write(_(b'available colors:\n'))
593 # sort label with a '_' after the other to group '_background' entry.
593 # sort label with a '_' after the other to group '_background' entry.
594 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
594 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
595 for colorname, label in items:
595 for colorname, label in items:
596 ui.write(b'%s\n' % colorname, label=label)
596 ui.write(b'%s\n' % colorname, label=label)
597
597
598
598
def _debugdisplaystyle(ui):
    """List configured style labels together with their rendered effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every label to the widest one so the effect columns line up
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * max(0, width - len(label)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
612
612
613
613
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.

    This command creates a "version 1" stream clone, which is deprecated in
    favor of newer versions of the stream protocol. Bundles using such newer
    versions can be generated using the `hg bundle` command.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # stream clones copy raw revlogs, so phase boundaries are not honored
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
635
639
636
640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index file: read it directly from the cwd,
        # bypassing the repository store
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        # yield ('n', (rev, parents)) node events, plus ('l', (rev, name))
        # label events for the revisions listed on the command line
        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # rev -> list of tag names pointing at that revision
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # track the current branch so an ('a', branchname) annotation
            # is only emitted when the branch actually changes
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # serialize the event stream into dagtext lines and print them
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
706
710
707
711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # -c/-m/--dir pick the storage implicitly; the single positional
    # argument is then the revision rather than a file name.
    implicit_storage = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if implicit_storage:
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
725
729
726
730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is a (unixtime, tz-offset) pair
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
745
749
746
750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    - base: a full snapshot
    - snap: an intermediate snapshot
    - p1: a delta against the first parent
    - p2: a delta against the second parent
    - skip1: a delta against the same base as p1
    (when p1 has empty delta
    - skip2: a delta against the same base as p2
    (when p2 has empty delta
    - prev: a delta against the previous revision
    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
    (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
    base of delta chain to end of this revision; a measurement
    of how much extra data we need to read/seek across to read
    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
    (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    # chain size per revision, memoized across revinfo() calls so a shared
    # chain prefix is only summed once
    chain_size_cache = {}

    def revinfo(rev):
        # return (p1, p2, compsize, uncompsize, deltatype, chain, chainsize)
        # for the given storage revision
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            # follow empty deltas down to the first real delta base
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # classify the delta base; order matters: direct parents first,
            # then full snapshot, then the "skip" cases explained above
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without general delta, a delta base is always the previous rev
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        chain_size = 0
        # walk the chain from the newest rev backwards; stop as soon as a
        # memoized prefix sum is found
        for iter_rev in reversed(chain):
            cached = chain_size_cache.get(iter_rev)
            if cached is not None:
                chain_size += cached
                break
            e = index[iter_rev]
            chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        chain_size_cache[rev] = chain_size

        return p1, p2, compsize, uncompsize, deltatype, chain, chain_size

    fm = ui.formatter(b'debugdeltachain', opts)

    # NOTE(review): column spacing in this header may have been collapsed by
    # the rendering this block was recovered from — confirm against upstream
    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # delta chains are numbered in order of first appearance of their base
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: this rev is its own base
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: slice the chain into the hunks that
            # would actually be read from disk
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
994
998
995
999
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # with a single positional argument the revlog is implicit (-c/-m/--dir)
    # and that argument is the revision
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2

    rev = int(rev)

    rl = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    p1r, p2r = rl.parentrevs(rev)

    # map --source to the revision the delta search starts from; kept as an
    # if/elif chain so deltaparent() is only consulted when actually needed
    if source == b'prev':
        base_rev = rev - 1
    elif source == b'p2':
        base_rev = p2r
    elif source == b'p1':
        base_rev = p1r
    elif source == b'storage':
        base_rev = rl.deltaparent(rev)
    elif source == b'full':
        base_rev = nullrev
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, rl, rev, base_rev=base_rev)
1053
1057
1054
1058
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 metadata file instead of the entries
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is the deprecated spelling of --dates=no; it wins when given
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) when --datesort is requested
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # the 0o20000 bit distinguishes symlinks in the recorded st_mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1142
1146
1143
1147
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1

    The dirstate-v2 docket embeds a hash of the ignore patterns that were
    in effect when the tree metadata was written; dirstate-v1 stores no
    such hash, so the command prints nothing in that case.
    """
    # Only the dirstate-v2 on-disk format records the ignore-pattern hash.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is stored at the tail of the tree metadata blob.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1158
1162
1159
1163
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)))

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # Build the "remote" side: either a real peer for the given URL, or
    # (with --remote-as-revs) the local repository filtered down so that it
    # only exposes the requested subset of revisions.
    if not remote_revs:
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, opts, path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        branches = (None, [])
        # hide every revision that is an ancestor of the requested subset
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        # register a throw-away repoview filter implementing the subset
        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    # Optionally shrink the local side the same way (--local-as-revs).
    if local_revs:
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit/statistics entries filled in by the discovery
    # implementation and by the accounting below.
    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern sampling-based set discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: capture the chatter emitted during the
        # run so it can be reported as a formatter field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        # only the null revision in common means "nothing in common"
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # sanity check: common and missing partition the repository
    assert len(common) + len(missing) == len(all)

    # the set of revisions the discovery could not classify from the
    # initial head exchange alone
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    # the per-category round-trip/query counters are only present when the
    # discovery implementation recorded them in the audit data
    if b'total-round-trips-heads' in data:
        fm.plain(
            b"  round-trips-heads:   %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b"  round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b"  round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries:               %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b"  queries-branches:    %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b"  queries-between:     %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1441
1445
1442
1446
_chunksize = 4 << 10  # 4 KiB: buffer size used by debugdownload's copy loop
1444
1448
1445
1449
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through Mercurial's url handling (so proxy and
    authentication configuration apply) and streamed in fixed-size chunks
    either to the file named by ``-o``/``--output`` or, by default, to the
    ui output stream.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # copy in bounded chunks so arbitrarily large resources do not
            # have to fit in memory
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # release the response handle too; it was previously leaked when
        # only the destination file was closed
        fh.close()
1468
1472
1469
1473
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)

    # report extensions in a stable, name-sorted order
    loaded = sorted(extensions.extensions(ui), key=operator.itemgetter(0))
    for name, module in loaded:
        internal = extensions.ismoduleinternal(module)

        # best-effort guess at where the extension was loaded from
        source = None
        if util.safehasattr(module, '__file__'):
            source = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen binary: extensions live inside the executable itself
            source = pycompat.sysexecutable

        if internal:
            tested = []  # never expose magic string to users
        else:
            tested = getattr(module, 'testedwith', b'').split()
        buglink = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            fm.write(b'name', b'%s', name)
            if internal or hgver in tested:
                fm.plain(b'\n')
            elif not tested:
                fm.plain(_(b' (untested!)\n'))
            else:
                # annotate with the most recent version it was tested with
                fm.plain(b' (%s!)\n' % tested[-1])

        fm.condwrite(
            ui.verbose and source,
            b'source',
            _(b' location: %s\n'),
            source or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][internal])
            fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and tested,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(tested, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and buglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            buglink or b"",
        )

    fm.end()
1531
1535
1532
1536
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Parses ``expr`` through the fileset language pipeline (optionally
    dumping intermediate trees with ``-p``), then matches it against a
    candidate file set and prints the matching file names.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # successive processing stages of the fileset expression; each can be
    # dumped with --show-stage NAME (or 'all')
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # validate requested stage names before running the pipeline
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the stage header is omitted in the legacy --verbose-only mode
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate files the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        # every file touched by any revision, plus their subrepo states
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include the working directory contents (unknown and ignored too)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # -s/--show-matcher defaults to None, so plain --verbose also dumps it
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1628
1632
1629
1633
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (cache mode) makes no sense combined with replaying a
    # report or with a dry run, so reject those combinations up front.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only affects revlog v1 metadata; refuse to touch anything else.
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # All the heavy lifting (detection + rewrite) lives in the rewrite module.
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1702
1706
1703
1707
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Width of the name column: widest variant name, but at least as wide as
    # the b'format-variant' header itself.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Pad each variant name so the value columns line up.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():
        # Plain output renders booleans as yes/no; strings pass through
        # (safehasattr on 'startswith' distinguishes bytes-like values).
        def formatvalue(value):
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # Structured formatters (json, template, ...) get the raw value.
        formatvalue = pycompat.identity

    # Header row; config/default columns only appear in --verbose mode.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so color output can highlight repo/config/default
        # mismatches for this variant.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1774
1778
1775
1779
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    # Case sensitivity is probed by creating a temporary file in `path`;
    # if that fails (e.g. read-only directory) we report b'(unknown)'.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1798
1802
1799
1803
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing compression name to the on-disk bundle type
    # header understood by bundle2.writebundle.
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1846
1850
1847
1851
@command(b'debugignore', [], b'[FILE]...')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # The file itself matches an ignore rule.
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # Otherwise look for an ignored ancestor directory.
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1896
1900
1897
1901
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # openstorage may hand back a wrapper (e.g. a filelog); unwrap to the
    # underlying revlog when a _revlog attribute is present.
    revlog = getattr(store, '_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1919
1923
1920
1924
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        # Emit one edge per real parent; the second parent is skipped when
        # it is the null revision.
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")
1939
1943
1940
1944
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index (shortest-prefix lookup) so its internal stats
    # counters are populated before we read them.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # stats() only exists on the native (C/Rust) index implementation.
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1950
1954
1951
1955
1952 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1956 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1953 def debuginstall(ui, **opts):
1957 def debuginstall(ui, **opts):
1954 """test Mercurial installation
1958 """test Mercurial installation
1955
1959
1956 Returns 0 on success.
1960 Returns 0 on success.
1957 """
1961 """
1958 opts = pycompat.byteskwargs(opts)
1962 opts = pycompat.byteskwargs(opts)
1959
1963
1960 problems = 0
1964 problems = 0
1961
1965
1962 fm = ui.formatter(b'debuginstall', opts)
1966 fm = ui.formatter(b'debuginstall', opts)
1963 fm.startitem()
1967 fm.startitem()
1964
1968
1965 # encoding might be unknown or wrong. don't translate these messages.
1969 # encoding might be unknown or wrong. don't translate these messages.
1966 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1970 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1967 err = None
1971 err = None
1968 try:
1972 try:
1969 codecs.lookup(pycompat.sysstr(encoding.encoding))
1973 codecs.lookup(pycompat.sysstr(encoding.encoding))
1970 except LookupError as inst:
1974 except LookupError as inst:
1971 err = stringutil.forcebytestr(inst)
1975 err = stringutil.forcebytestr(inst)
1972 problems += 1
1976 problems += 1
1973 fm.condwrite(
1977 fm.condwrite(
1974 err,
1978 err,
1975 b'encodingerror',
1979 b'encodingerror',
1976 b" %s\n (check that your locale is properly set)\n",
1980 b" %s\n (check that your locale is properly set)\n",
1977 err,
1981 err,
1978 )
1982 )
1979
1983
1980 # Python
1984 # Python
1981 pythonlib = None
1985 pythonlib = None
1982 if util.safehasattr(os, '__file__'):
1986 if util.safehasattr(os, '__file__'):
1983 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1987 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1984 elif getattr(sys, 'oxidized', False):
1988 elif getattr(sys, 'oxidized', False):
1985 pythonlib = pycompat.sysexecutable
1989 pythonlib = pycompat.sysexecutable
1986
1990
1987 fm.write(
1991 fm.write(
1988 b'pythonexe',
1992 b'pythonexe',
1989 _(b"checking Python executable (%s)\n"),
1993 _(b"checking Python executable (%s)\n"),
1990 pycompat.sysexecutable or _(b"unknown"),
1994 pycompat.sysexecutable or _(b"unknown"),
1991 )
1995 )
1992 fm.write(
1996 fm.write(
1993 b'pythonimplementation',
1997 b'pythonimplementation',
1994 _(b"checking Python implementation (%s)\n"),
1998 _(b"checking Python implementation (%s)\n"),
1995 pycompat.sysbytes(platform.python_implementation()),
1999 pycompat.sysbytes(platform.python_implementation()),
1996 )
2000 )
1997 fm.write(
2001 fm.write(
1998 b'pythonver',
2002 b'pythonver',
1999 _(b"checking Python version (%s)\n"),
2003 _(b"checking Python version (%s)\n"),
2000 (b"%d.%d.%d" % sys.version_info[:3]),
2004 (b"%d.%d.%d" % sys.version_info[:3]),
2001 )
2005 )
2002 fm.write(
2006 fm.write(
2003 b'pythonlib',
2007 b'pythonlib',
2004 _(b"checking Python lib (%s)...\n"),
2008 _(b"checking Python lib (%s)...\n"),
2005 pythonlib or _(b"unknown"),
2009 pythonlib or _(b"unknown"),
2006 )
2010 )
2007
2011
2008 try:
2012 try:
2009 from . import rustext # pytype: disable=import-error
2013 from . import rustext # pytype: disable=import-error
2010
2014
2011 rustext.__doc__ # trigger lazy import
2015 rustext.__doc__ # trigger lazy import
2012 except ImportError:
2016 except ImportError:
2013 rustext = None
2017 rustext = None
2014
2018
2015 security = set(sslutil.supportedprotocols)
2019 security = set(sslutil.supportedprotocols)
2016 if sslutil.hassni:
2020 if sslutil.hassni:
2017 security.add(b'sni')
2021 security.add(b'sni')
2018
2022
2019 fm.write(
2023 fm.write(
2020 b'pythonsecurity',
2024 b'pythonsecurity',
2021 _(b"checking Python security support (%s)\n"),
2025 _(b"checking Python security support (%s)\n"),
2022 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2026 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2023 )
2027 )
2024
2028
2025 # These are warnings, not errors. So don't increment problem count. This
2029 # These are warnings, not errors. So don't increment problem count. This
2026 # may change in the future.
2030 # may change in the future.
2027 if b'tls1.2' not in security:
2031 if b'tls1.2' not in security:
2028 fm.plain(
2032 fm.plain(
2029 _(
2033 _(
2030 b' TLS 1.2 not supported by Python install; '
2034 b' TLS 1.2 not supported by Python install; '
2031 b'network connections lack modern security\n'
2035 b'network connections lack modern security\n'
2032 )
2036 )
2033 )
2037 )
2034 if b'sni' not in security:
2038 if b'sni' not in security:
2035 fm.plain(
2039 fm.plain(
2036 _(
2040 _(
2037 b' SNI not supported by Python install; may have '
2041 b' SNI not supported by Python install; may have '
2038 b'connectivity issues with some servers\n'
2042 b'connectivity issues with some servers\n'
2039 )
2043 )
2040 )
2044 )
2041
2045
2042 fm.plain(
2046 fm.plain(
2043 _(
2047 _(
2044 b"checking Rust extensions (%s)\n"
2048 b"checking Rust extensions (%s)\n"
2045 % (b'missing' if rustext is None else b'installed')
2049 % (b'missing' if rustext is None else b'installed')
2046 ),
2050 ),
2047 )
2051 )
2048
2052
2049 # TODO print CA cert info
2053 # TODO print CA cert info
2050
2054
2051 # hg version
2055 # hg version
2052 hgver = util.version()
2056 hgver = util.version()
2053 fm.write(
2057 fm.write(
2054 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2058 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2055 )
2059 )
2056 fm.write(
2060 fm.write(
2057 b'hgverextra',
2061 b'hgverextra',
2058 _(b"checking Mercurial custom build (%s)\n"),
2062 _(b"checking Mercurial custom build (%s)\n"),
2059 b'+'.join(hgver.split(b'+')[1:]),
2063 b'+'.join(hgver.split(b'+')[1:]),
2060 )
2064 )
2061
2065
2062 # compiled modules
2066 # compiled modules
2063 hgmodules = None
2067 hgmodules = None
2064 if util.safehasattr(sys.modules[__name__], '__file__'):
2068 if util.safehasattr(sys.modules[__name__], '__file__'):
2065 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2069 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2066 elif getattr(sys, 'oxidized', False):
2070 elif getattr(sys, 'oxidized', False):
2067 hgmodules = pycompat.sysexecutable
2071 hgmodules = pycompat.sysexecutable
2068
2072
2069 fm.write(
2073 fm.write(
2070 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2074 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2071 )
2075 )
2072 fm.write(
2076 fm.write(
2073 b'hgmodules',
2077 b'hgmodules',
2074 _(b"checking installed modules (%s)...\n"),
2078 _(b"checking installed modules (%s)...\n"),
2075 hgmodules or _(b"unknown"),
2079 hgmodules or _(b"unknown"),
2076 )
2080 )
2077
2081
2078 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2082 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2079 rustext = rustandc # for now, that's the only case
2083 rustext = rustandc # for now, that's the only case
2080 cext = policy.policy in (b'c', b'allow') or rustandc
2084 cext = policy.policy in (b'c', b'allow') or rustandc
2081 nopure = cext or rustext
2085 nopure = cext or rustext
2082 if nopure:
2086 if nopure:
2083 err = None
2087 err = None
2084 try:
2088 try:
2085 if cext:
2089 if cext:
2086 from .cext import ( # pytype: disable=import-error
2090 from .cext import ( # pytype: disable=import-error
2087 base85,
2091 base85,
2088 bdiff,
2092 bdiff,
2089 mpatch,
2093 mpatch,
2090 osutil,
2094 osutil,
2091 )
2095 )
2092
2096
2093 # quiet pyflakes
2097 # quiet pyflakes
2094 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2098 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2095 if rustext:
2099 if rustext:
2096 from .rustext import ( # pytype: disable=import-error
2100 from .rustext import ( # pytype: disable=import-error
2097 ancestor,
2101 ancestor,
2098 dirstate,
2102 dirstate,
2099 )
2103 )
2100
2104
2101 dir(ancestor), dir(dirstate) # quiet pyflakes
2105 dir(ancestor), dir(dirstate) # quiet pyflakes
2102 except Exception as inst:
2106 except Exception as inst:
2103 err = stringutil.forcebytestr(inst)
2107 err = stringutil.forcebytestr(inst)
2104 problems += 1
2108 problems += 1
2105 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2109 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2106
2110
2107 compengines = util.compengines._engines.values()
2111 compengines = util.compengines._engines.values()
2108 fm.write(
2112 fm.write(
2109 b'compengines',
2113 b'compengines',
2110 _(b'checking registered compression engines (%s)\n'),
2114 _(b'checking registered compression engines (%s)\n'),
2111 fm.formatlist(
2115 fm.formatlist(
2112 sorted(e.name() for e in compengines),
2116 sorted(e.name() for e in compengines),
2113 name=b'compengine',
2117 name=b'compengine',
2114 fmt=b'%s',
2118 fmt=b'%s',
2115 sep=b', ',
2119 sep=b', ',
2116 ),
2120 ),
2117 )
2121 )
2118 fm.write(
2122 fm.write(
2119 b'compenginesavail',
2123 b'compenginesavail',
2120 _(b'checking available compression engines (%s)\n'),
2124 _(b'checking available compression engines (%s)\n'),
2121 fm.formatlist(
2125 fm.formatlist(
2122 sorted(e.name() for e in compengines if e.available()),
2126 sorted(e.name() for e in compengines if e.available()),
2123 name=b'compengine',
2127 name=b'compengine',
2124 fmt=b'%s',
2128 fmt=b'%s',
2125 sep=b', ',
2129 sep=b', ',
2126 ),
2130 ),
2127 )
2131 )
2128 wirecompengines = compression.compengines.supportedwireengines(
2132 wirecompengines = compression.compengines.supportedwireengines(
2129 compression.SERVERROLE
2133 compression.SERVERROLE
2130 )
2134 )
2131 fm.write(
2135 fm.write(
2132 b'compenginesserver',
2136 b'compenginesserver',
2133 _(
2137 _(
2134 b'checking available compression engines '
2138 b'checking available compression engines '
2135 b'for wire protocol (%s)\n'
2139 b'for wire protocol (%s)\n'
2136 ),
2140 ),
2137 fm.formatlist(
2141 fm.formatlist(
2138 [e.name() for e in wirecompengines if e.wireprotosupport()],
2142 [e.name() for e in wirecompengines if e.wireprotosupport()],
2139 name=b'compengine',
2143 name=b'compengine',
2140 fmt=b'%s',
2144 fmt=b'%s',
2141 sep=b', ',
2145 sep=b', ',
2142 ),
2146 ),
2143 )
2147 )
2144 re2 = b'missing'
2148 re2 = b'missing'
2145 if util._re2:
2149 if util._re2:
2146 re2 = b'available'
2150 re2 = b'available'
2147 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2151 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2148 fm.data(re2=bool(util._re2))
2152 fm.data(re2=bool(util._re2))
2149
2153
2150 # templates
2154 # templates
2151 p = templater.templatedir()
2155 p = templater.templatedir()
2152 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2156 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2153 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2157 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2154 if p:
2158 if p:
2155 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2159 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2156 if m:
2160 if m:
2157 # template found, check if it is working
2161 # template found, check if it is working
2158 err = None
2162 err = None
2159 try:
2163 try:
2160 templater.templater.frommapfile(m)
2164 templater.templater.frommapfile(m)
2161 except Exception as inst:
2165 except Exception as inst:
2162 err = stringutil.forcebytestr(inst)
2166 err = stringutil.forcebytestr(inst)
2163 p = None
2167 p = None
2164 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2168 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2165 else:
2169 else:
2166 p = None
2170 p = None
2167 fm.condwrite(
2171 fm.condwrite(
2168 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2172 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2169 )
2173 )
2170 fm.condwrite(
2174 fm.condwrite(
2171 not m,
2175 not m,
2172 b'defaulttemplatenotfound',
2176 b'defaulttemplatenotfound',
2173 _(b" template '%s' not found\n"),
2177 _(b" template '%s' not found\n"),
2174 b"default",
2178 b"default",
2175 )
2179 )
2176 if not p:
2180 if not p:
2177 problems += 1
2181 problems += 1
2178 fm.condwrite(
2182 fm.condwrite(
2179 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2183 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2180 )
2184 )
2181
2185
2182 # editor
2186 # editor
2183 editor = ui.geteditor()
2187 editor = ui.geteditor()
2184 editor = util.expandpath(editor)
2188 editor = util.expandpath(editor)
2185 editorbin = procutil.shellsplit(editor)[0]
2189 editorbin = procutil.shellsplit(editor)[0]
2186 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2190 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2187 cmdpath = procutil.findexe(editorbin)
2191 cmdpath = procutil.findexe(editorbin)
2188 fm.condwrite(
2192 fm.condwrite(
2189 not cmdpath and editor == b'vi',
2193 not cmdpath and editor == b'vi',
2190 b'vinotfound',
2194 b'vinotfound',
2191 _(
2195 _(
2192 b" No commit editor set and can't find %s in PATH\n"
2196 b" No commit editor set and can't find %s in PATH\n"
2193 b" (specify a commit editor in your configuration"
2197 b" (specify a commit editor in your configuration"
2194 b" file)\n"
2198 b" file)\n"
2195 ),
2199 ),
2196 not cmdpath and editor == b'vi' and editorbin,
2200 not cmdpath and editor == b'vi' and editorbin,
2197 )
2201 )
2198 fm.condwrite(
2202 fm.condwrite(
2199 not cmdpath and editor != b'vi',
2203 not cmdpath and editor != b'vi',
2200 b'editornotfound',
2204 b'editornotfound',
2201 _(
2205 _(
2202 b" Can't find editor '%s' in PATH\n"
2206 b" Can't find editor '%s' in PATH\n"
2203 b" (specify a commit editor in your configuration"
2207 b" (specify a commit editor in your configuration"
2204 b" file)\n"
2208 b" file)\n"
2205 ),
2209 ),
2206 not cmdpath and editorbin,
2210 not cmdpath and editorbin,
2207 )
2211 )
2208 if not cmdpath and editor != b'vi':
2212 if not cmdpath and editor != b'vi':
2209 problems += 1
2213 problems += 1
2210
2214
2211 # check username
2215 # check username
2212 username = None
2216 username = None
2213 err = None
2217 err = None
2214 try:
2218 try:
2215 username = ui.username()
2219 username = ui.username()
2216 except error.Abort as e:
2220 except error.Abort as e:
2217 err = e.message
2221 err = e.message
2218 problems += 1
2222 problems += 1
2219
2223
2220 fm.condwrite(
2224 fm.condwrite(
2221 username, b'username', _(b"checking username (%s)\n"), username
2225 username, b'username', _(b"checking username (%s)\n"), username
2222 )
2226 )
2223 fm.condwrite(
2227 fm.condwrite(
2224 err,
2228 err,
2225 b'usernameerror',
2229 b'usernameerror',
2226 _(
2230 _(
2227 b"checking username...\n %s\n"
2231 b"checking username...\n %s\n"
2228 b" (specify a username in your configuration file)\n"
2232 b" (specify a username in your configuration file)\n"
2229 ),
2233 ),
2230 err,
2234 err,
2231 )
2235 )
2232
2236
2233 for name, mod in extensions.extensions():
2237 for name, mod in extensions.extensions():
2234 handler = getattr(mod, 'debuginstall', None)
2238 handler = getattr(mod, 'debuginstall', None)
2235 if handler is not None:
2239 if handler is not None:
2236 problems += handler(ui, fm)
2240 problems += handler(ui, fm)
2237
2241
2238 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2242 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2239 if not problems:
2243 if not problems:
2240 fm.data(problems=problems)
2244 fm.data(problems=problems)
2241 fm.condwrite(
2245 fm.condwrite(
2242 problems,
2246 problems,
2243 b'problems',
2247 b'problems',
2244 _(b"%d problems detected, please check your install!\n"),
2248 _(b"%d problems detected, please check your install!\n"),
2245 problems,
2249 problems,
2246 )
2250 )
2247 fm.end()
2251 fm.end()
2248
2252
2249 return problems
2253 return problems
2250
2254
2251
2255
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Open the target as a peer: the repo may be remote, so we rely on the
    # wire-protocol 'known' capability rather than local storage access.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    known_flags = peer.known([bin(node_id) for node_id in ids])
    bits = b"".join(b"1" if known else b"0" for known in known_flags)
    ui.write(b"%s\n" % bits)
2265
2269
2266
2270
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # Historical alias kept for old shell-completion scripts; it simply
    # delegates to the modern implementation.
    debugnamecomplete(ui, repo, *args)
2271
2275
2272
2276
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # NOTE: opts keys are native strs here (no byteskwargs conversion in
    # this command), hence 'force_free_lock' rather than b'force_free_lock'.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        # Force-freeing is exclusive with the display/set modes below.
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # wlock(False): do not wait if the lock is already held.
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    # Interactive session: hold the lock(s) until the user
                    # answers the prompt.
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    # Non-interactive: hold the lock(s) until interrupted.
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # Always release whatever locks we managed to take.
        release(*locks)

    # No options given: report the current state of both locks.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Returns 1 if the named lock appears held, 0 if it is free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We took the lock ourselves, so nobody else held it: free.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock files record "host:pid"; only show the host when
                    # it differs from the local machine.
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock file vanished between our attempt and the stat:
                # treat it as free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    # Non-zero return (number of held locks) signals "locks are held".
    return held
2395
2399
2396
2400
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache from the manifest storage; not every
        # revlog implementation exposes one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Mutating the cache requires the working-state lock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # No option given: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2470
2474
2471
2475
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template: commits, then per-file state with
        # optional merge-path / rename details and extras, then file-less
        # extras.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # Emit the two merge parents (local/other) with their optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the record layout depends on the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that are not themselves in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2579
2583
2580
2584
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect names from every namespace except branches; branches are
    # handled separately because historically only open branches were
    # listed.
    candidates = set()
    for ns_name, ns in repo.names.items():
        if ns_name == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # Match every requested prefix; no arguments means "match everything".
    prefixes = args if args else [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in candidates if name.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2603
2607
2604
2608
@command(
    b'debugnodemap',
    (
        cmdutil.debugrevlogopts
        + [
            (
                b'',
                b'dump-new',
                False,
                _(b'write a (new) persistent binary nodemap on stdout'),
            ),
            (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
            (
                b'',
                b'check',
                False,
                _(b'check that the data on disk data are correct.'),
            ),
            (
                b'',
                b'metadata',
                False,
                _(b'display the on disk meta data for the nodemap'),
            ),
        ]
    ),
    _(b'-c|-m|FILE'),
)
def debugnodemap(ui, repo, file_=None, **opts):
    """write and inspect on disk nodemap"""
    # -c/-m/--dir select a storage explicitly and conflict with a FILE
    # argument; with nothing given, default to the changelog.
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if file_ is not None:
            raise error.InputError(
                _(b'cannot specify a file with other arguments')
            )
    elif file_ is None:
        opts['changelog'] = True
    r = cmdutil.openstorage(
        repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
    )
    # Unwrap storage objects down to the underlying revlog, whose index
    # carries the nodemap.
    if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
        r = r._revlog
    if opts['dump_new']:
        # Prefer the index's own serializer when available (e.g. the Rust
        # index); otherwise build the persistent data in Python.
        if util.safehasattr(r.index, "nodemap_data_all"):
            data = r.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(r.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, r.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2674
2678
2675
2679
2676 @command(
2680 @command(
2677 b'debugobsolete',
2681 b'debugobsolete',
2678 [
2682 [
2679 (b'', b'flags', 0, _(b'markers flag')),
2683 (b'', b'flags', 0, _(b'markers flag')),
2680 (
2684 (
2681 b'',
2685 b'',
2682 b'record-parents',
2686 b'record-parents',
2683 False,
2687 False,
2684 _(b'record parent information for the precursor'),
2688 _(b'record parent information for the precursor'),
2685 ),
2689 ),
2686 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2690 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2687 (
2691 (
2688 b'',
2692 b'',
2689 b'exclusive',
2693 b'exclusive',
2690 False,
2694 False,
2691 _(b'restrict display to markers only relevant to REV'),
2695 _(b'restrict display to markers only relevant to REV'),
2692 ),
2696 ),
2693 (b'', b'index', False, _(b'display index of the marker')),
2697 (b'', b'index', False, _(b'display index of the marker')),
2694 (b'', b'delete', [], _(b'delete markers specified by indices')),
2698 (b'', b'delete', [], _(b'delete markers specified by indices')),
2695 ]
2699 ]
2696 + cmdutil.commitopts2
2700 + cmdutil.commitopts2
2697 + cmdutil.formatteropts,
2701 + cmdutil.formatteropts,
2698 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2702 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2699 )
2703 )
2700 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2704 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2701 """create arbitrary obsolete marker
2705 """create arbitrary obsolete marker
2702
2706
2703 With no arguments, displays the list of obsolescence markers."""
2707 With no arguments, displays the list of obsolescence markers."""
2704
2708
2705 opts = pycompat.byteskwargs(opts)
2709 opts = pycompat.byteskwargs(opts)
2706
2710
2707 def parsenodeid(s):
2711 def parsenodeid(s):
2708 try:
2712 try:
2709 # We do not use revsingle/revrange functions here to accept
2713 # We do not use revsingle/revrange functions here to accept
2710 # arbitrary node identifiers, possibly not present in the
2714 # arbitrary node identifiers, possibly not present in the
2711 # local repository.
2715 # local repository.
2712 n = bin(s)
2716 n = bin(s)
2713 if len(n) != repo.nodeconstants.nodelen:
2717 if len(n) != repo.nodeconstants.nodelen:
2714 raise ValueError
2718 raise ValueError
2715 return n
2719 return n
2716 except ValueError:
2720 except ValueError:
2717 raise error.InputError(
2721 raise error.InputError(
2718 b'changeset references must be full hexadecimal '
2722 b'changeset references must be full hexadecimal '
2719 b'node identifiers'
2723 b'node identifiers'
2720 )
2724 )
2721
2725
2722 if opts.get(b'delete'):
2726 if opts.get(b'delete'):
2723 indices = []
2727 indices = []
2724 for v in opts.get(b'delete'):
2728 for v in opts.get(b'delete'):
2725 try:
2729 try:
2726 indices.append(int(v))
2730 indices.append(int(v))
2727 except ValueError:
2731 except ValueError:
2728 raise error.InputError(
2732 raise error.InputError(
2729 _(b'invalid index value: %r') % v,
2733 _(b'invalid index value: %r') % v,
2730 hint=_(b'use integers for indices'),
2734 hint=_(b'use integers for indices'),
2731 )
2735 )
2732
2736
2733 if repo.currenttransaction():
2737 if repo.currenttransaction():
2734 raise error.Abort(
2738 raise error.Abort(
2735 _(b'cannot delete obsmarkers in the middle of transaction.')
2739 _(b'cannot delete obsmarkers in the middle of transaction.')
2736 )
2740 )
2737
2741
2738 with repo.lock():
2742 with repo.lock():
2739 n = repair.deleteobsmarkers(repo.obsstore, indices)
2743 n = repair.deleteobsmarkers(repo.obsstore, indices)
2740 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2744 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2741
2745
2742 return
2746 return
2743
2747
2744 if precursor is not None:
2748 if precursor is not None:
2745 if opts[b'rev']:
2749 if opts[b'rev']:
2746 raise error.InputError(
2750 raise error.InputError(
2747 b'cannot select revision when creating marker'
2751 b'cannot select revision when creating marker'
2748 )
2752 )
2749 metadata = {}
2753 metadata = {}
2750 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2754 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2751 succs = tuple(parsenodeid(succ) for succ in successors)
2755 succs = tuple(parsenodeid(succ) for succ in successors)
2752 l = repo.lock()
2756 l = repo.lock()
2753 try:
2757 try:
2754 tr = repo.transaction(b'debugobsolete')
2758 tr = repo.transaction(b'debugobsolete')
2755 try:
2759 try:
2756 date = opts.get(b'date')
2760 date = opts.get(b'date')
2757 if date:
2761 if date:
2758 date = dateutil.parsedate(date)
2762 date = dateutil.parsedate(date)
2759 else:
2763 else:
2760 date = None
2764 date = None
2761 prec = parsenodeid(precursor)
2765 prec = parsenodeid(precursor)
2762 parents = None
2766 parents = None
2763 if opts[b'record_parents']:
2767 if opts[b'record_parents']:
2764 if prec not in repo.unfiltered():
2768 if prec not in repo.unfiltered():
2765 raise error.Abort(
2769 raise error.Abort(
2766 b'cannot used --record-parents on '
2770 b'cannot used --record-parents on '
2767 b'unknown changesets'
2771 b'unknown changesets'
2768 )
2772 )
2769 parents = repo.unfiltered()[prec].parents()
2773 parents = repo.unfiltered()[prec].parents()
2770 parents = tuple(p.node() for p in parents)
2774 parents = tuple(p.node() for p in parents)
2771 repo.obsstore.create(
2775 repo.obsstore.create(
2772 tr,
2776 tr,
2773 prec,
2777 prec,
2774 succs,
2778 succs,
2775 opts[b'flags'],
2779 opts[b'flags'],
2776 parents=parents,
2780 parents=parents,
2777 date=date,
2781 date=date,
2778 metadata=metadata,
2782 metadata=metadata,
2779 ui=ui,
2783 ui=ui,
2780 )
2784 )
2781 tr.close()
2785 tr.close()
2782 except ValueError as exc:
2786 except ValueError as exc:
2783 raise error.Abort(
2787 raise error.Abort(
2784 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2788 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2785 )
2789 )
2786 finally:
2790 finally:
2787 tr.release()
2791 tr.release()
2788 finally:
2792 finally:
2789 l.release()
2793 l.release()
2790 else:
2794 else:
2791 if opts[b'rev']:
2795 if opts[b'rev']:
2792 revs = logcmdutil.revrange(repo, opts[b'rev'])
2796 revs = logcmdutil.revrange(repo, opts[b'rev'])
2793 nodes = [repo[r].node() for r in revs]
2797 nodes = [repo[r].node() for r in revs]
2794 markers = list(
2798 markers = list(
2795 obsutil.getmarkers(
2799 obsutil.getmarkers(
2796 repo, nodes=nodes, exclusive=opts[b'exclusive']
2800 repo, nodes=nodes, exclusive=opts[b'exclusive']
2797 )
2801 )
2798 )
2802 )
2799 markers.sort(key=lambda x: x._data)
2803 markers.sort(key=lambda x: x._data)
2800 else:
2804 else:
2801 markers = obsutil.getmarkers(repo)
2805 markers = obsutil.getmarkers(repo)
2802
2806
2803 markerstoiter = markers
2807 markerstoiter = markers
2804 isrelevant = lambda m: True
2808 isrelevant = lambda m: True
2805 if opts.get(b'rev') and opts.get(b'index'):
2809 if opts.get(b'rev') and opts.get(b'index'):
2806 markerstoiter = obsutil.getmarkers(repo)
2810 markerstoiter = obsutil.getmarkers(repo)
2807 markerset = set(markers)
2811 markerset = set(markers)
2808 isrelevant = lambda m: m in markerset
2812 isrelevant = lambda m: m in markerset
2809
2813
2810 fm = ui.formatter(b'debugobsolete', opts)
2814 fm = ui.formatter(b'debugobsolete', opts)
2811 for i, m in enumerate(markerstoiter):
2815 for i, m in enumerate(markerstoiter):
2812 if not isrelevant(m):
2816 if not isrelevant(m):
2813 # marker can be irrelevant when we're iterating over a set
2817 # marker can be irrelevant when we're iterating over a set
2814 # of markers (markerstoiter) which is bigger than the set
2818 # of markers (markerstoiter) which is bigger than the set
2815 # of markers we want to display (markers)
2819 # of markers we want to display (markers)
2816 # this can happen if both --index and --rev options are
2820 # this can happen if both --index and --rev options are
2817 # provided and thus we need to iterate over all of the markers
2821 # provided and thus we need to iterate over all of the markers
2818 # to get the correct indices, but only display the ones that
2822 # to get the correct indices, but only display the ones that
2819 # are relevant to --rev value
2823 # are relevant to --rev value
2820 continue
2824 continue
2821 fm.startitem()
2825 fm.startitem()
2822 ind = i if opts.get(b'index') else None
2826 ind = i if opts.get(b'index') else None
2823 cmdutil.showmarker(fm, m, index=ind)
2827 cmdutil.showmarker(fm, m, index=ind)
2824 fm.end()
2828 fm.end()
2825
2829
2826
2830
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the revision to inspect (working context when --rev absent).
    byte_opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byte_opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p1.
    copy_map = ctx.p1copies()
    for dst in copy_map:
        ui.write(b'%s -> %s\n' % (copy_map[dst], dst))
2839
2843
2840
2844
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Resolve the revision to inspect (working context when --rev absent).
    byte_opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byte_opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p2.
    copy_map = ctx.p2copies()
    for dst in copy_map:
        ui.write(b'%s -> %s\n' % (copy_map[dst], dst))
2853
2857
2854
2858
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Normalize the requested path and bail out early when it does
        # not live inside the repository root.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # The dirstate stores '/'-separated paths; translate on platforms
        # whose native separator differs.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if not (f.startswith(spec) and st.state in acceptable):
                continue
            if fixpaths:
                f = f.replace(b'/', pycompat.ossep)
            if fullpaths:
                addfile(f)
                continue
            # Only complete up to the next path separator unless --full.
            sep = f.find(pycompat.ossep, speclen)
            if sep >= 0:
                adddir(f[:sep])
            else:
                addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the options;
    # an empty selection means "accept everything" (b'nmar').
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2923
2927
2924
2928
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then list the copies detected between them,
    # sorted by destination path for stable output.
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copy_items = copies.pathcopies(source_ctx, dest_ctx, matcher).items()
    for dst, src in sorted(copy_items):
        ui.write(b'%s -> %s\n' % (src, dst))
2938
2942
2939
2943
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always enabled here; it only becomes
    # visible in the output when --debug is in effect.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # Always release the peer connection, even if a probe failed.
        peer.close()
2963
2967
2964
2968
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the higher-priority sources that could preempt
        # merge-pattern matching (see the docstring's ordering list).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Unless --debug is active, suppress the chatter produced
            # while the tool is being picked.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3049
3053
3050
3054
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Update mode: move the key from old to new, reporting the
            # remote's success/failure result.
            key, old, new = keyinfo
            request = {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            }
            with target.commandexecutor() as executor:
                r = executor.callcommand(b'pushkey', request).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            return not r
    finally:
        target.close()
3086
3090
3087
3091
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvec (parent vector) encodings of two revisions

    Prints both vectors, their depths, and the relation between them:
    ``=`` (equal), ``>``/``<`` (ancestor ordering), ``|`` (related but
    neither is an ancestor), or ``?`` when none of those comparisons
    hold.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Bug fix: previously no branch matched here and `rel` stayed
        # unbound, crashing with a NameError in the write below.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3114
3118
3115
3119
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            # Files the manifest knows about but the dirstate does not...
            manifestonly = in_manifest - in_dirstate
            # ...plus dirstate-only files that are not freshly added.
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f
                for f in dirstate_only
                if not dirstate.get_entry(f).added
            }
            changedfiles = manifestonly | not_added

        with dirstate.changing_parents(repo):
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3167
3171
3168
3172
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate the actual rebuild to repair; --only-data restricts the
    # scan to .d files.
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3185
3189
3186
3190
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byte_opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byte_opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byte_opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a false value.
        renamed_from = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abspath)
        if renamed_from:
            src, srcnode = renamed_from
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, src, hex(srcnode)))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3206
3210
3207
3211
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3213
3217
3214
3218
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    byte_opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, b'debugrevlog', file_, byte_opts)

    # --dump prints the raw index data; otherwise emit summary statistics.
    if byte_opts.get(b"dump"):
        revlog_debug.dump(ui, rlog)
    else:
        revlog_debug.debug_revlog(ui, rlog)
    return 0
3231
3235
3232
3236
3233 @command(
3237 @command(
3234 b'debugrevlogindex',
3238 b'debugrevlogindex',
3235 cmdutil.debugrevlogopts
3239 cmdutil.debugrevlogopts
3236 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3240 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3237 _(b'[-f FORMAT] -c|-m|FILE'),
3241 _(b'[-f FORMAT] -c|-m|FILE'),
3238 optionalrepo=True,
3242 optionalrepo=True,
3239 )
3243 )
3240 def debugrevlogindex(ui, repo, file_=None, **opts):
3244 def debugrevlogindex(ui, repo, file_=None, **opts):
3241 """dump the contents of a revlog index"""
3245 """dump the contents of a revlog index"""
3242 opts = pycompat.byteskwargs(opts)
3246 opts = pycompat.byteskwargs(opts)
3243 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3247 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3244 format = opts.get(b'format', 0)
3248 format = opts.get(b'format', 0)
3245 if format not in (0, 1):
3249 if format not in (0, 1):
3246 raise error.Abort(_(b"unknown format %d") % format)
3250 raise error.Abort(_(b"unknown format %d") % format)
3247
3251
3248 if ui.debugflag:
3252 if ui.debugflag:
3249 shortfn = hex
3253 shortfn = hex
3250 else:
3254 else:
3251 shortfn = short
3255 shortfn = short
3252
3256
3253 # There might not be anything in r, so have a sane default
3257 # There might not be anything in r, so have a sane default
3254 idlen = 12
3258 idlen = 12
3255 for i in r:
3259 for i in r:
3256 idlen = len(shortfn(r.node(i)))
3260 idlen = len(shortfn(r.node(i)))
3257 break
3261 break
3258
3262
3259 if format == 0:
3263 if format == 0:
3260 if ui.verbose:
3264 if ui.verbose:
3261 ui.writenoi18n(
3265 ui.writenoi18n(
3262 b" rev offset length linkrev %s %s p2\n"
3266 b" rev offset length linkrev %s %s p2\n"
3263 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3267 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3264 )
3268 )
3265 else:
3269 else:
3266 ui.writenoi18n(
3270 ui.writenoi18n(
3267 b" rev linkrev %s %s p2\n"
3271 b" rev linkrev %s %s p2\n"
3268 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3272 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3269 )
3273 )
3270 elif format == 1:
3274 elif format == 1:
3271 if ui.verbose:
3275 if ui.verbose:
3272 ui.writenoi18n(
3276 ui.writenoi18n(
3273 (
3277 (
3274 b" rev flag offset length size link p1"
3278 b" rev flag offset length size link p1"
3275 b" p2 %s\n"
3279 b" p2 %s\n"
3276 )
3280 )
3277 % b"nodeid".rjust(idlen)
3281 % b"nodeid".rjust(idlen)
3278 )
3282 )
3279 else:
3283 else:
3280 ui.writenoi18n(
3284 ui.writenoi18n(
3281 b" rev flag size link p1 p2 %s\n"
3285 b" rev flag size link p1 p2 %s\n"
3282 % b"nodeid".rjust(idlen)
3286 % b"nodeid".rjust(idlen)
3283 )
3287 )
3284
3288
3285 for i in r:
3289 for i in r:
3286 node = r.node(i)
3290 node = r.node(i)
3287 if format == 0:
3291 if format == 0:
3288 try:
3292 try:
3289 pp = r.parents(node)
3293 pp = r.parents(node)
3290 except Exception:
3294 except Exception:
3291 pp = [repo.nullid, repo.nullid]
3295 pp = [repo.nullid, repo.nullid]
3292 if ui.verbose:
3296 if ui.verbose:
3293 ui.write(
3297 ui.write(
3294 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3298 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3295 % (
3299 % (
3296 i,
3300 i,
3297 r.start(i),
3301 r.start(i),
3298 r.length(i),
3302 r.length(i),
3299 r.linkrev(i),
3303 r.linkrev(i),
3300 shortfn(node),
3304 shortfn(node),
3301 shortfn(pp[0]),
3305 shortfn(pp[0]),
3302 shortfn(pp[1]),
3306 shortfn(pp[1]),
3303 )
3307 )
3304 )
3308 )
3305 else:
3309 else:
3306 ui.write(
3310 ui.write(
3307 b"% 6d % 7d %s %s %s\n"
3311 b"% 6d % 7d %s %s %s\n"
3308 % (
3312 % (
3309 i,
3313 i,
3310 r.linkrev(i),
3314 r.linkrev(i),
3311 shortfn(node),
3315 shortfn(node),
3312 shortfn(pp[0]),
3316 shortfn(pp[0]),
3313 shortfn(pp[1]),
3317 shortfn(pp[1]),
3314 )
3318 )
3315 )
3319 )
3316 elif format == 1:
3320 elif format == 1:
3317 pr = r.parentrevs(i)
3321 pr = r.parentrevs(i)
3318 if ui.verbose:
3322 if ui.verbose:
3319 ui.write(
3323 ui.write(
3320 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3324 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3321 % (
3325 % (
3322 i,
3326 i,
3323 r.flags(i),
3327 r.flags(i),
3324 r.start(i),
3328 r.start(i),
3325 r.length(i),
3329 r.length(i),
3326 r.rawsize(i),
3330 r.rawsize(i),
3327 r.linkrev(i),
3331 r.linkrev(i),
3328 pr[0],
3332 pr[0],
3329 pr[1],
3333 pr[1],
3330 shortfn(node),
3334 shortfn(node),
3331 )
3335 )
3332 )
3336 )
3333 else:
3337 else:
3334 ui.write(
3338 ui.write(
3335 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3339 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3336 % (
3340 % (
3337 i,
3341 i,
3338 r.flags(i),
3342 r.flags(i),
3339 r.rawsize(i),
3343 r.rawsize(i),
3340 r.linkrev(i),
3344 r.linkrev(i),
3341 pr[0],
3345 pr[0],
3342 pr[1],
3346 pr[1],
3343 shortfn(node),
3347 shortfn(node),
3344 )
3348 )
3345 )
3349 )
3346
3350
3347
3351
3348 @command(
3352 @command(
3349 b'debugrevspec',
3353 b'debugrevspec',
3350 [
3354 [
3351 (
3355 (
3352 b'',
3356 b'',
3353 b'optimize',
3357 b'optimize',
3354 None,
3358 None,
3355 _(b'print parsed tree after optimizing (DEPRECATED)'),
3359 _(b'print parsed tree after optimizing (DEPRECATED)'),
3356 ),
3360 ),
3357 (
3361 (
3358 b'',
3362 b'',
3359 b'show-revs',
3363 b'show-revs',
3360 True,
3364 True,
3361 _(b'print list of result revisions (default)'),
3365 _(b'print list of result revisions (default)'),
3362 ),
3366 ),
3363 (
3367 (
3364 b's',
3368 b's',
3365 b'show-set',
3369 b'show-set',
3366 None,
3370 None,
3367 _(b'print internal representation of result set'),
3371 _(b'print internal representation of result set'),
3368 ),
3372 ),
3369 (
3373 (
3370 b'p',
3374 b'p',
3371 b'show-stage',
3375 b'show-stage',
3372 [],
3376 [],
3373 _(b'print parsed tree at the given stage'),
3377 _(b'print parsed tree at the given stage'),
3374 _(b'NAME'),
3378 _(b'NAME'),
3375 ),
3379 ),
3376 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3380 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3377 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3381 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3378 ],
3382 ],
3379 b'REVSPEC',
3383 b'REVSPEC',
3380 )
3384 )
3381 def debugrevspec(ui, repo, expr, **opts):
3385 def debugrevspec(ui, repo, expr, **opts):
3382 """parse and apply a revision specification
3386 """parse and apply a revision specification
3383
3387
3384 Use -p/--show-stage option to print the parsed tree at the given stages.
3388 Use -p/--show-stage option to print the parsed tree at the given stages.
3385 Use -p all to print tree at every stage.
3389 Use -p all to print tree at every stage.
3386
3390
3387 Use --no-show-revs option with -s or -p to print only the set
3391 Use --no-show-revs option with -s or -p to print only the set
3388 representation or the parsed tree respectively.
3392 representation or the parsed tree respectively.
3389
3393
3390 Use --verify-optimized to compare the optimized result with the unoptimized
3394 Use --verify-optimized to compare the optimized result with the unoptimized
3391 one. Returns 1 if the optimized result differs.
3395 one. Returns 1 if the optimized result differs.
3392 """
3396 """
3393 opts = pycompat.byteskwargs(opts)
3397 opts = pycompat.byteskwargs(opts)
3394 aliases = ui.configitems(b'revsetalias')
3398 aliases = ui.configitems(b'revsetalias')
3395 stages = [
3399 stages = [
3396 (b'parsed', lambda tree: tree),
3400 (b'parsed', lambda tree: tree),
3397 (
3401 (
3398 b'expanded',
3402 b'expanded',
3399 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3403 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3400 ),
3404 ),
3401 (b'concatenated', revsetlang.foldconcat),
3405 (b'concatenated', revsetlang.foldconcat),
3402 (b'analyzed', revsetlang.analyze),
3406 (b'analyzed', revsetlang.analyze),
3403 (b'optimized', revsetlang.optimize),
3407 (b'optimized', revsetlang.optimize),
3404 ]
3408 ]
3405 if opts[b'no_optimized']:
3409 if opts[b'no_optimized']:
3406 stages = stages[:-1]
3410 stages = stages[:-1]
3407 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3411 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3408 raise error.Abort(
3412 raise error.Abort(
3409 _(b'cannot use --verify-optimized with --no-optimized')
3413 _(b'cannot use --verify-optimized with --no-optimized')
3410 )
3414 )
3411 stagenames = {n for n, f in stages}
3415 stagenames = {n for n, f in stages}
3412
3416
3413 showalways = set()
3417 showalways = set()
3414 showchanged = set()
3418 showchanged = set()
3415 if ui.verbose and not opts[b'show_stage']:
3419 if ui.verbose and not opts[b'show_stage']:
3416 # show parsed tree by --verbose (deprecated)
3420 # show parsed tree by --verbose (deprecated)
3417 showalways.add(b'parsed')
3421 showalways.add(b'parsed')
3418 showchanged.update([b'expanded', b'concatenated'])
3422 showchanged.update([b'expanded', b'concatenated'])
3419 if opts[b'optimize']:
3423 if opts[b'optimize']:
3420 showalways.add(b'optimized')
3424 showalways.add(b'optimized')
3421 if opts[b'show_stage'] and opts[b'optimize']:
3425 if opts[b'show_stage'] and opts[b'optimize']:
3422 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3426 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3423 if opts[b'show_stage'] == [b'all']:
3427 if opts[b'show_stage'] == [b'all']:
3424 showalways.update(stagenames)
3428 showalways.update(stagenames)
3425 else:
3429 else:
3426 for n in opts[b'show_stage']:
3430 for n in opts[b'show_stage']:
3427 if n not in stagenames:
3431 if n not in stagenames:
3428 raise error.Abort(_(b'invalid stage name: %s') % n)
3432 raise error.Abort(_(b'invalid stage name: %s') % n)
3429 showalways.update(opts[b'show_stage'])
3433 showalways.update(opts[b'show_stage'])
3430
3434
3431 treebystage = {}
3435 treebystage = {}
3432 printedtree = None
3436 printedtree = None
3433 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3437 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3434 for n, f in stages:
3438 for n, f in stages:
3435 treebystage[n] = tree = f(tree)
3439 treebystage[n] = tree = f(tree)
3436 if n in showalways or (n in showchanged and tree != printedtree):
3440 if n in showalways or (n in showchanged and tree != printedtree):
3437 if opts[b'show_stage'] or n != b'parsed':
3441 if opts[b'show_stage'] or n != b'parsed':
3438 ui.write(b"* %s:\n" % n)
3442 ui.write(b"* %s:\n" % n)
3439 ui.write(revsetlang.prettyformat(tree), b"\n")
3443 ui.write(revsetlang.prettyformat(tree), b"\n")
3440 printedtree = tree
3444 printedtree = tree
3441
3445
3442 if opts[b'verify_optimized']:
3446 if opts[b'verify_optimized']:
3443 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3447 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3444 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3448 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3445 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3449 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3446 ui.writenoi18n(
3450 ui.writenoi18n(
3447 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3451 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3448 )
3452 )
3449 ui.writenoi18n(
3453 ui.writenoi18n(
3450 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3454 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3451 )
3455 )
3452 arevs = list(arevs)
3456 arevs = list(arevs)
3453 brevs = list(brevs)
3457 brevs = list(brevs)
3454 if arevs == brevs:
3458 if arevs == brevs:
3455 return 0
3459 return 0
3456 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3460 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3457 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3461 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3458 sm = difflib.SequenceMatcher(None, arevs, brevs)
3462 sm = difflib.SequenceMatcher(None, arevs, brevs)
3459 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3463 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3460 if tag in ('delete', 'replace'):
3464 if tag in ('delete', 'replace'):
3461 for c in arevs[alo:ahi]:
3465 for c in arevs[alo:ahi]:
3462 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3466 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3463 if tag in ('insert', 'replace'):
3467 if tag in ('insert', 'replace'):
3464 for c in brevs[blo:bhi]:
3468 for c in brevs[blo:bhi]:
3465 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3469 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3466 if tag == 'equal':
3470 if tag == 'equal':
3467 for c in arevs[alo:ahi]:
3471 for c in arevs[alo:ahi]:
3468 ui.write(b' %d\n' % c)
3472 ui.write(b' %d\n' % c)
3469 return 1
3473 return 1
3470
3474
3471 func = revset.makematcher(tree)
3475 func = revset.makematcher(tree)
3472 revs = func(repo)
3476 revs = func(repo)
3473 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3477 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3474 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3478 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3475 if not opts[b'show_revs']:
3479 if not opts[b'show_revs']:
3476 return
3480 return
3477 for c in revs:
3481 for c in revs:
3478 ui.write(b"%d\n" % c)
3482 ui.write(b"%d\n" % c)
3479
3483
3480
3484
3481 @command(
3485 @command(
3482 b'debugserve',
3486 b'debugserve',
3483 [
3487 [
3484 (
3488 (
3485 b'',
3489 b'',
3486 b'sshstdio',
3490 b'sshstdio',
3487 False,
3491 False,
3488 _(b'run an SSH server bound to process handles'),
3492 _(b'run an SSH server bound to process handles'),
3489 ),
3493 ),
3490 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3494 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3491 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3495 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3492 ],
3496 ],
3493 b'',
3497 b'',
3494 )
3498 )
3495 def debugserve(ui, repo, **opts):
3499 def debugserve(ui, repo, **opts):
3496 """run a server with advanced settings
3500 """run a server with advanced settings
3497
3501
3498 This command is similar to :hg:`serve`. It exists partially as a
3502 This command is similar to :hg:`serve`. It exists partially as a
3499 workaround to the fact that ``hg serve --stdio`` must have specific
3503 workaround to the fact that ``hg serve --stdio`` must have specific
3500 arguments for security reasons.
3504 arguments for security reasons.
3501 """
3505 """
3502 opts = pycompat.byteskwargs(opts)
3506 opts = pycompat.byteskwargs(opts)
3503
3507
3504 if not opts[b'sshstdio']:
3508 if not opts[b'sshstdio']:
3505 raise error.Abort(_(b'only --sshstdio is currently supported'))
3509 raise error.Abort(_(b'only --sshstdio is currently supported'))
3506
3510
3507 logfh = None
3511 logfh = None
3508
3512
3509 if opts[b'logiofd'] and opts[b'logiofile']:
3513 if opts[b'logiofd'] and opts[b'logiofile']:
3510 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3514 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3511
3515
3512 if opts[b'logiofd']:
3516 if opts[b'logiofd']:
3513 # Ideally we would be line buffered. But line buffering in binary
3517 # Ideally we would be line buffered. But line buffering in binary
3514 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3518 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3515 # buffering could have performance impacts. But since this isn't
3519 # buffering could have performance impacts. But since this isn't
3516 # performance critical code, it should be fine.
3520 # performance critical code, it should be fine.
3517 try:
3521 try:
3518 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3522 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3519 except OSError as e:
3523 except OSError as e:
3520 if e.errno != errno.ESPIPE:
3524 if e.errno != errno.ESPIPE:
3521 raise
3525 raise
3522 # can't seek a pipe, so `ab` mode fails on py3
3526 # can't seek a pipe, so `ab` mode fails on py3
3523 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3527 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3524 elif opts[b'logiofile']:
3528 elif opts[b'logiofile']:
3525 logfh = open(opts[b'logiofile'], b'ab', 0)
3529 logfh = open(opts[b'logiofile'], b'ab', 0)
3526
3530
3527 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3531 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3528 s.serve_forever()
3532 s.serve_forever()
3529
3533
3530
3534
3531 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3535 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3532 def debugsetparents(ui, repo, rev1, rev2=None):
3536 def debugsetparents(ui, repo, rev1, rev2=None):
3533 """manually set the parents of the current working directory (DANGEROUS)
3537 """manually set the parents of the current working directory (DANGEROUS)
3534
3538
3535 This command is not what you are looking for and should not be used. Using
3539 This command is not what you are looking for and should not be used. Using
3536 this command will most certainly results in slight corruption of the file
3540 this command will most certainly results in slight corruption of the file
3537 level histories withing your repository. DO NOT USE THIS COMMAND.
3541 level histories withing your repository. DO NOT USE THIS COMMAND.
3538
3542
3539 The command update the p1 and p2 field in the dirstate, and not touching
3543 The command update the p1 and p2 field in the dirstate, and not touching
3540 anything else. This useful for writing repository conversion tools, but
3544 anything else. This useful for writing repository conversion tools, but
3541 should be used with extreme care. For example, neither the working
3545 should be used with extreme care. For example, neither the working
3542 directory nor the dirstate is updated, so file status may be incorrect
3546 directory nor the dirstate is updated, so file status may be incorrect
3543 after running this command. Only used if you are one of the few people that
3547 after running this command. Only used if you are one of the few people that
3544 deeply unstand both conversion tools and file level histories. If you are
3548 deeply unstand both conversion tools and file level histories. If you are
3545 reading this help, you are not one of this people (most of them sailed west
3549 reading this help, you are not one of this people (most of them sailed west
3546 from Mithlond anyway.
3550 from Mithlond anyway.
3547
3551
3548 So one last time DO NOT USE THIS COMMAND.
3552 So one last time DO NOT USE THIS COMMAND.
3549
3553
3550 Returns 0 on success.
3554 Returns 0 on success.
3551 """
3555 """
3552
3556
3553 node1 = scmutil.revsingle(repo, rev1).node()
3557 node1 = scmutil.revsingle(repo, rev1).node()
3554 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3558 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3555
3559
3556 with repo.wlock():
3560 with repo.wlock():
3557 repo.setparents(node1, node2)
3561 repo.setparents(node1, node2)
3558
3562
3559
3563
3560 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3564 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3561 def debugsidedata(ui, repo, file_, rev=None, **opts):
3565 def debugsidedata(ui, repo, file_, rev=None, **opts):
3562 """dump the side data for a cl/manifest/file revision
3566 """dump the side data for a cl/manifest/file revision
3563
3567
3564 Use --verbose to dump the sidedata content."""
3568 Use --verbose to dump the sidedata content."""
3565 opts = pycompat.byteskwargs(opts)
3569 opts = pycompat.byteskwargs(opts)
3566 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3570 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3567 if rev is not None:
3571 if rev is not None:
3568 raise error.InputError(
3572 raise error.InputError(
3569 _(b'cannot specify a revision with other arguments')
3573 _(b'cannot specify a revision with other arguments')
3570 )
3574 )
3571 file_, rev = None, file_
3575 file_, rev = None, file_
3572 elif rev is None:
3576 elif rev is None:
3573 raise error.InputError(_(b'please specify a revision'))
3577 raise error.InputError(_(b'please specify a revision'))
3574 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3578 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3575 r = getattr(r, '_revlog', r)
3579 r = getattr(r, '_revlog', r)
3576 try:
3580 try:
3577 sidedata = r.sidedata(r.lookup(rev))
3581 sidedata = r.sidedata(r.lookup(rev))
3578 except KeyError:
3582 except KeyError:
3579 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3583 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3580 if sidedata:
3584 if sidedata:
3581 sidedata = list(sidedata.items())
3585 sidedata = list(sidedata.items())
3582 sidedata.sort()
3586 sidedata.sort()
3583 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3587 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3584 for key, value in sidedata:
3588 for key, value in sidedata:
3585 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3589 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3586 if ui.verbose:
3590 if ui.verbose:
3587 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3591 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3588
3592
3589
3593
3590 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3594 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3591 def debugssl(ui, repo, source=None, **opts):
3595 def debugssl(ui, repo, source=None, **opts):
3592 """test a secure connection to a server
3596 """test a secure connection to a server
3593
3597
3594 This builds the certificate chain for the server on Windows, installing the
3598 This builds the certificate chain for the server on Windows, installing the
3595 missing intermediates and trusted root via Windows Update if necessary. It
3599 missing intermediates and trusted root via Windows Update if necessary. It
3596 does nothing on other platforms.
3600 does nothing on other platforms.
3597
3601
3598 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3602 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3599 that server is used. See :hg:`help urls` for more information.
3603 that server is used. See :hg:`help urls` for more information.
3600
3604
3601 If the update succeeds, retry the original operation. Otherwise, the cause
3605 If the update succeeds, retry the original operation. Otherwise, the cause
3602 of the SSL error is likely another issue.
3606 of the SSL error is likely another issue.
3603 """
3607 """
3604 if not pycompat.iswindows:
3608 if not pycompat.iswindows:
3605 raise error.Abort(
3609 raise error.Abort(
3606 _(b'certificate chain building is only possible on Windows')
3610 _(b'certificate chain building is only possible on Windows')
3607 )
3611 )
3608
3612
3609 if not source:
3613 if not source:
3610 if not repo:
3614 if not repo:
3611 raise error.Abort(
3615 raise error.Abort(
3612 _(
3616 _(
3613 b"there is no Mercurial repository here, and no "
3617 b"there is no Mercurial repository here, and no "
3614 b"server specified"
3618 b"server specified"
3615 )
3619 )
3616 )
3620 )
3617 source = b"default"
3621 source = b"default"
3618
3622
3619 path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
3623 path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
3620 url = path.url
3624 url = path.url
3621
3625
3622 defaultport = {b'https': 443, b'ssh': 22}
3626 defaultport = {b'https': 443, b'ssh': 22}
3623 if url.scheme in defaultport:
3627 if url.scheme in defaultport:
3624 try:
3628 try:
3625 addr = (url.host, int(url.port or defaultport[url.scheme]))
3629 addr = (url.host, int(url.port or defaultport[url.scheme]))
3626 except ValueError:
3630 except ValueError:
3627 raise error.Abort(_(b"malformed port number in URL"))
3631 raise error.Abort(_(b"malformed port number in URL"))
3628 else:
3632 else:
3629 raise error.Abort(_(b"only https and ssh connections are supported"))
3633 raise error.Abort(_(b"only https and ssh connections are supported"))
3630
3634
3631 from . import win32
3635 from . import win32
3632
3636
3633 s = ssl.wrap_socket(
3637 s = ssl.wrap_socket(
3634 socket.socket(),
3638 socket.socket(),
3635 ssl_version=ssl.PROTOCOL_TLS,
3639 ssl_version=ssl.PROTOCOL_TLS,
3636 cert_reqs=ssl.CERT_NONE,
3640 cert_reqs=ssl.CERT_NONE,
3637 ca_certs=None,
3641 ca_certs=None,
3638 )
3642 )
3639
3643
3640 try:
3644 try:
3641 s.connect(addr)
3645 s.connect(addr)
3642 cert = s.getpeercert(True)
3646 cert = s.getpeercert(True)
3643
3647
3644 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3648 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3645
3649
3646 complete = win32.checkcertificatechain(cert, build=False)
3650 complete = win32.checkcertificatechain(cert, build=False)
3647
3651
3648 if not complete:
3652 if not complete:
3649 ui.status(_(b'certificate chain is incomplete, updating... '))
3653 ui.status(_(b'certificate chain is incomplete, updating... '))
3650
3654
3651 if not win32.checkcertificatechain(cert):
3655 if not win32.checkcertificatechain(cert):
3652 ui.status(_(b'failed.\n'))
3656 ui.status(_(b'failed.\n'))
3653 else:
3657 else:
3654 ui.status(_(b'done.\n'))
3658 ui.status(_(b'done.\n'))
3655 else:
3659 else:
3656 ui.status(_(b'full certificate chain is available\n'))
3660 ui.status(_(b'full certificate chain is available\n'))
3657 finally:
3661 finally:
3658 s.close()
3662 s.close()
3659
3663
3660
3664
3661 @command(
3665 @command(
3662 b'debug::stable-tail-sort',
3666 b'debug::stable-tail-sort',
3663 [
3667 [
3664 (
3668 (
3665 b'T',
3669 b'T',
3666 b'template',
3670 b'template',
3667 b'{rev}\n',
3671 b'{rev}\n',
3668 _(b'display with template'),
3672 _(b'display with template'),
3669 _(b'TEMPLATE'),
3673 _(b'TEMPLATE'),
3670 ),
3674 ),
3671 ],
3675 ],
3672 b'REV',
3676 b'REV',
3673 )
3677 )
3674 def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
3678 def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
3675 """display the stable-tail sort of the ancestors of a given node"""
3679 """display the stable-tail sort of the ancestors of a given node"""
3676 rev = logcmdutil.revsingle(repo, revspec).rev()
3680 rev = logcmdutil.revsingle(repo, revspec).rev()
3677 cl = repo.changelog
3681 cl = repo.changelog
3678
3682
3679 displayer = logcmdutil.maketemplater(ui, repo, template)
3683 displayer = logcmdutil.maketemplater(ui, repo, template)
3680 sorted_revs = stabletailsort._stable_tail_sort_naive(cl, rev)
3684 sorted_revs = stabletailsort._stable_tail_sort_naive(cl, rev)
3681 for ancestor_rev in sorted_revs:
3685 for ancestor_rev in sorted_revs:
3682 displayer.show(repo[ancestor_rev])
3686 displayer.show(repo[ancestor_rev])
3683
3687
3684
3688
3685 @command(
3689 @command(
3686 b'debug::stable-tail-sort-leaps',
3690 b'debug::stable-tail-sort-leaps',
3687 [
3691 [
3688 (
3692 (
3689 b'T',
3693 b'T',
3690 b'template',
3694 b'template',
3691 b'{rev}',
3695 b'{rev}',
3692 _(b'display with template'),
3696 _(b'display with template'),
3693 _(b'TEMPLATE'),
3697 _(b'TEMPLATE'),
3694 ),
3698 ),
3695 (b's', b'specific', False, _(b'restrict to specific leaps')),
3699 (b's', b'specific', False, _(b'restrict to specific leaps')),
3696 ],
3700 ],
3697 b'REV',
3701 b'REV',
3698 )
3702 )
3699 def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
3703 def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
3700 """display the leaps in the stable-tail sort of a node, one per line"""
3704 """display the leaps in the stable-tail sort of a node, one per line"""
3701 rev = logcmdutil.revsingle(repo, rspec).rev()
3705 rev = logcmdutil.revsingle(repo, rspec).rev()
3702
3706
3703 if specific:
3707 if specific:
3704 get_leaps = stabletailsort._find_specific_leaps_naive
3708 get_leaps = stabletailsort._find_specific_leaps_naive
3705 else:
3709 else:
3706 get_leaps = stabletailsort._find_all_leaps_naive
3710 get_leaps = stabletailsort._find_all_leaps_naive
3707
3711
3708 displayer = logcmdutil.maketemplater(ui, repo, template)
3712 displayer = logcmdutil.maketemplater(ui, repo, template)
3709 for source, target in get_leaps(repo.changelog, rev):
3713 for source, target in get_leaps(repo.changelog, rev):
3710 displayer.show(repo[source])
3714 displayer.show(repo[source])
3711 displayer.show(repo[target])
3715 displayer.show(repo[target])
3712 ui.write(b'\n')
3716 ui.write(b'\n')
3713
3717
3714
3718
3715 @command(
3719 @command(
3716 b"debugbackupbundle",
3720 b"debugbackupbundle",
3717 [
3721 [
3718 (
3722 (
3719 b"",
3723 b"",
3720 b"recover",
3724 b"recover",
3721 b"",
3725 b"",
3722 b"brings the specified changeset back into the repository",
3726 b"brings the specified changeset back into the repository",
3723 )
3727 )
3724 ]
3728 ]
3725 + cmdutil.logopts,
3729 + cmdutil.logopts,
3726 _(b"hg debugbackupbundle [--recover HASH]"),
3730 _(b"hg debugbackupbundle [--recover HASH]"),
3727 )
3731 )
3728 def debugbackupbundle(ui, repo, *pats, **opts):
3732 def debugbackupbundle(ui, repo, *pats, **opts):
3729 """lists the changesets available in backup bundles
3733 """lists the changesets available in backup bundles
3730
3734
3731 Without any arguments, this command prints a list of the changesets in each
3735 Without any arguments, this command prints a list of the changesets in each
3732 backup bundle.
3736 backup bundle.
3733
3737
3734 --recover takes a changeset hash and unbundles the first bundle that
3738 --recover takes a changeset hash and unbundles the first bundle that
3735 contains that hash, which puts that changeset back in your repository.
3739 contains that hash, which puts that changeset back in your repository.
3736
3740
3737 --verbose will print the entire commit message and the bundle path for that
3741 --verbose will print the entire commit message and the bundle path for that
3738 backup.
3742 backup.
3739 """
3743 """
3740 backups = list(
3744 backups = list(
3741 filter(
3745 filter(
3742 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3746 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3743 )
3747 )
3744 )
3748 )
3745 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3749 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3746
3750
3747 opts = pycompat.byteskwargs(opts)
3751 opts = pycompat.byteskwargs(opts)
3748 opts[b"bundle"] = b""
3752 opts[b"bundle"] = b""
3749 opts[b"force"] = None
3753 opts[b"force"] = None
3750 limit = logcmdutil.getlimit(opts)
3754 limit = logcmdutil.getlimit(opts)
3751
3755
3752 def display(other, chlist, displayer):
3756 def display(other, chlist, displayer):
3753 if opts.get(b"newest_first"):
3757 if opts.get(b"newest_first"):
3754 chlist.reverse()
3758 chlist.reverse()
3755 count = 0
3759 count = 0
3756 for n in chlist:
3760 for n in chlist:
3757 if limit is not None and count >= limit:
3761 if limit is not None and count >= limit:
3758 break
3762 break
3759 parents = [
3763 parents = [
3760 True for p in other.changelog.parents(n) if p != repo.nullid
3764 True for p in other.changelog.parents(n) if p != repo.nullid
3761 ]
3765 ]
3762 if opts.get(b"no_merges") and len(parents) == 2:
3766 if opts.get(b"no_merges") and len(parents) == 2:
3763 continue
3767 continue
3764 count += 1
3768 count += 1
3765 displayer.show(other[n])
3769 displayer.show(other[n])
3766
3770
3767 recovernode = opts.get(b"recover")
3771 recovernode = opts.get(b"recover")
3768 if recovernode:
3772 if recovernode:
3769 if scmutil.isrevsymbol(repo, recovernode):
3773 if scmutil.isrevsymbol(repo, recovernode):
3770 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3774 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3771 return
3775 return
3772 elif backups:
3776 elif backups:
3773 msg = _(
3777 msg = _(
3774 b"Recover changesets using: hg debugbackupbundle --recover "
3778 b"Recover changesets using: hg debugbackupbundle --recover "
3775 b"<changeset hash>\n\nAvailable backup changesets:"
3779 b"<changeset hash>\n\nAvailable backup changesets:"
3776 )
3780 )
3777 ui.status(msg, label=b"status.removed")
3781 ui.status(msg, label=b"status.removed")
3778 else:
3782 else:
3779 ui.status(_(b"no backup changesets found\n"))
3783 ui.status(_(b"no backup changesets found\n"))
3780 return
3784 return
3781
3785
3782 for backup in backups:
3786 for backup in backups:
3783 # Much of this is copied from the hg incoming logic
3787 # Much of this is copied from the hg incoming logic
3784 source = os.path.relpath(backup, encoding.getcwd())
3788 source = os.path.relpath(backup, encoding.getcwd())
3785 path = urlutil.get_unique_pull_path_obj(
3789 path = urlutil.get_unique_pull_path_obj(
3786 b'debugbackupbundle',
3790 b'debugbackupbundle',
3787 ui,
3791 ui,
3788 source,
3792 source,
3789 )
3793 )
3790 try:
3794 try:
3791 other = hg.peer(repo, opts, path)
3795 other = hg.peer(repo, opts, path)
3792 except error.LookupError as ex:
3796 except error.LookupError as ex:
3793 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3797 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3794 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3798 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3795 ui.warn(msg, hint=hint)
3799 ui.warn(msg, hint=hint)
3796 continue
3800 continue
3797 branches = (path.branch, opts.get(b'branch', []))
3801 branches = (path.branch, opts.get(b'branch', []))
3798 revs, checkout = hg.addbranchrevs(
3802 revs, checkout = hg.addbranchrevs(
3799 repo, other, branches, opts.get(b"rev")
3803 repo, other, branches, opts.get(b"rev")
3800 )
3804 )
3801
3805
3802 if revs:
3806 if revs:
3803 revs = [other.lookup(rev) for rev in revs]
3807 revs = [other.lookup(rev) for rev in revs]
3804
3808
3805 with ui.silent():
3809 with ui.silent():
3806 try:
3810 try:
3807 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3811 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3808 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3812 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3809 )
3813 )
3810 except error.LookupError:
3814 except error.LookupError:
3811 continue
3815 continue
3812
3816
3813 try:
3817 try:
3814 if not chlist:
3818 if not chlist:
3815 continue
3819 continue
3816 if recovernode:
3820 if recovernode:
3817 with repo.lock(), repo.transaction(b"unbundle") as tr:
3821 with repo.lock(), repo.transaction(b"unbundle") as tr:
3818 if scmutil.isrevsymbol(other, recovernode):
3822 if scmutil.isrevsymbol(other, recovernode):
3819 ui.status(_(b"Unbundling %s\n") % (recovernode))
3823 ui.status(_(b"Unbundling %s\n") % (recovernode))
3820 f = hg.openpath(ui, path.loc)
3824 f = hg.openpath(ui, path.loc)
3821 gen = exchange.readbundle(ui, f, path.loc)
3825 gen = exchange.readbundle(ui, f, path.loc)
3822 if isinstance(gen, bundle2.unbundle20):
3826 if isinstance(gen, bundle2.unbundle20):
3823 bundle2.applybundle(
3827 bundle2.applybundle(
3824 repo,
3828 repo,
3825 gen,
3829 gen,
3826 tr,
3830 tr,
3827 source=b"unbundle",
3831 source=b"unbundle",
3828 url=b"bundle:" + path.loc,
3832 url=b"bundle:" + path.loc,
3829 )
3833 )
3830 else:
3834 else:
3831 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3835 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3832 break
3836 break
3833 else:
3837 else:
3834 backupdate = encoding.strtolocal(
3838 backupdate = encoding.strtolocal(
3835 time.strftime(
3839 time.strftime(
3836 "%a %H:%M, %Y-%m-%d",
3840 "%a %H:%M, %Y-%m-%d",
3837 time.localtime(os.path.getmtime(path.loc)),
3841 time.localtime(os.path.getmtime(path.loc)),
3838 )
3842 )
3839 )
3843 )
3840 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3844 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3841 if ui.verbose:
3845 if ui.verbose:
3842 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3846 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3843 else:
3847 else:
3844 opts[
3848 opts[
3845 b"template"
3849 b"template"
3846 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3850 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3847 displayer = logcmdutil.changesetdisplayer(
3851 displayer = logcmdutil.changesetdisplayer(
3848 ui, other, opts, False
3852 ui, other, opts, False
3849 )
3853 )
3850 display(other, chlist, displayer)
3854 display(other, chlist, displayer)
3851 displayer.close()
3855 displayer.close()
3852 finally:
3856 finally:
3853 cleanupfn()
3857 cleanupfn()
3854
3858
3855
3859
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """list the subrepository state recorded for a revision."""
    # Resolve the requested revision (falls back per scmutil.revsingle when
    # no rev is given).
    ctx = scmutil.revsingle(repo, rev, None)
    # One path/source/revision record per subrepository, ordered by path.
    for subpath, substate in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % substate[0])
        ui.writenoi18n(b' revision %s\n' % substate[1])
3867
3871
3868
3872
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Names exposed inside the interpreter's namespace.
    shell_locals = {'ui': ui, 'repo': repo}

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter.  Manually perform
    # the relevant site setup here so the interpreter can be quit in a
    # consistent manner, whether run with pyoxidizer, exewrapper.c, py.exe,
    # or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    command = opts.get('command')
    if not command:
        # No -c COMMAND: drop into a fully interactive session.
        code.interact(local=shell_locals)
        return

    # -c COMMAND: compile and execute the one-off program, then exit.
    compiled = code.compile_command(encoding.strfromlocal(command))
    code.InteractiveInterpreter(locals=shell_locals).runcode(compiled)
3918
3922
3919
3923
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    opts = pycompat.byteskwargs(opts)
    selections = [opts[b"changelog"], opts[b"manifest"], opts[b"filelogs"]]
    if all(choice is None for choice in selections):
        # No explicit selection: report on all three revlog categories.
        changelog = manifest = filelogs = True
    else:
        changelog, manifest, filelogs = selections

    # Hidden revisions must be included in the statistics.
    repo = repo.unfiltered()
    fm = ui.formatter(b'debug-revlog-stats', opts)
    revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
    fm.end()
3945
3949
3946
3950
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Display helpers: changesets render via bytes(), nodes via short().
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # One indented line per successors set: all members of the
                # set are space-separated on the same line.
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            # An empty set still terminates its (blank) line.
            ui.write(b'\n')
4001
4005
4002
4006
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')

    def describe(fnode):
        # Render a cached .hgtags filenode for display.
        if fnode is None:
            # No cache entry recorded for this changeset.
            return b'missing'
        if not fnode:
            # Present but falsy entry: the cache content is bogus.
            return b'invalid'
        rendered = hex(fnode)
        if not flog.hasnode(fnode):
            rendered += b' (unknown node)'
        return rendered

    for r in repo:
        node = repo[r].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        ui.write(b'%d %s %s\n' % (r, hex(node), describe(tagsnode)))
4021
4025
4022
4026
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # optionalrepo=True means repo may be None; --rev only makes sense
        # inside a repository.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions into template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            # Empty keys are rejected; b'ui' is reserved (presumably because
            # it would collide with a built-in template resource -- TODO
            # confirm against the templater resource names).
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            # Covers both a missing '=' (unpacking failure) and the explicit
            # raise above.
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the raw parse tree, plus the alias-expanded tree when the
        # expansion actually changed something.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: rendered once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: rendered once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4086
4090
4087
4091
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # Exercise ui.getpass() and echo whatever it produced.
    response = ui.getpass(prompt)
    if response is None:
        # No input was obtained; substitute a marker so the echoed line
        # stays printable.
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4102
4106
4103
4107
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Exercise ui.prompt() and echo the answer verbatim.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4116
4120
4117
4121
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take wlock before lock, matching the repository's usual lock ordering;
    # cache warming covers every known cache category.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4123
4127
4124
4128
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All heavy lifting lives in the upgrade module; the repeatable
    # --optimize values are deduplicated into a set before being handed over.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4174
4178
4175
4179
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return

    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        # Honor ui.slash on platforms whose separator is not '/'.
        def display(fn):
            return util.normpath(fn)

    else:

        def display(fn):
            return fn

    relpaths = [repo.pathto(p) for p in matched]
    # Size the two columns to the longest repo-relative and cwd-relative
    # paths respectively.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(p) for p in matched),
        max(len(p) for p in relpaths),
    )
    for path, rel in zip(matched, relpaths):
        flag = b'exact' if matcher.exact(path) else b''
        ui.write(b"%s\n" % (fmt % (path, display(rel), flag)).rstrip())
4202
4206
4203
4207
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # "<hex> (<phase>)" for each divergent changeset, with a trailing
            # space separating the list from the reason text.
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4221
4225
4222
4226
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """exercise argument passing over the wire protocol."""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the command-specific,
        # non-empty values are forwarded over the wire.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4253
4257
4254
4258
4255 def _parsewirelangblocks(fh):
4259 def _parsewirelangblocks(fh):
4256 activeaction = None
4260 activeaction = None
4257 blocklines = []
4261 blocklines = []
4258 lastindent = 0
4262 lastindent = 0
4259
4263
4260 for line in fh:
4264 for line in fh:
4261 line = line.rstrip()
4265 line = line.rstrip()
4262 if not line:
4266 if not line:
4263 continue
4267 continue
4264
4268
4265 if line.startswith(b'#'):
4269 if line.startswith(b'#'):
4266 continue
4270 continue
4267
4271
4268 if not line.startswith(b' '):
4272 if not line.startswith(b' '):
4269 # New block. Flush previous one.
4273 # New block. Flush previous one.
4270 if activeaction:
4274 if activeaction:
4271 yield activeaction, blocklines
4275 yield activeaction, blocklines
4272
4276
4273 activeaction = line
4277 activeaction = line
4274 blocklines = []
4278 blocklines = []
4275 lastindent = 0
4279 lastindent = 0
4276 continue
4280 continue
4277
4281
4278 # Else we start with an indent.
4282 # Else we start with an indent.
4279
4283
4280 if not activeaction:
4284 if not activeaction:
4281 raise error.Abort(_(b'indented line outside of block'))
4285 raise error.Abort(_(b'indented line outside of block'))
4282
4286
4283 indent = len(line) - len(line.lstrip())
4287 indent = len(line) - len(line.lstrip())
4284
4288
4285 # If this line is indented more than the last line, concatenate it.
4289 # If this line is indented more than the last line, concatenate it.
4286 if indent > lastindent and blocklines:
4290 if indent > lastindent and blocklines:
4287 blocklines[-1] += line.lstrip()
4291 blocklines[-1] += line.lstrip()
4288 else:
4292 else:
4289 blocklines.append(line)
4293 blocklines.append(line)
4290 lastindent = indent
4294 lastindent = indent
4291
4295
4292 # Flush last block.
4296 # Flush last block.
4293 if activeaction:
4297 if activeaction:
4294 yield activeaction, blocklines
4298 yield activeaction, blocklines
4295
4299
4296
4300
4297 @command(
4301 @command(
4298 b'debugwireproto',
4302 b'debugwireproto',
4299 [
4303 [
4300 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4304 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4301 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4305 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4302 (
4306 (
4303 b'',
4307 b'',
4304 b'noreadstderr',
4308 b'noreadstderr',
4305 False,
4309 False,
4306 _(b'do not read from stderr of the remote'),
4310 _(b'do not read from stderr of the remote'),
4307 ),
4311 ),
4308 (
4312 (
4309 b'',
4313 b'',
4310 b'nologhandshake',
4314 b'nologhandshake',
4311 False,
4315 False,
4312 _(b'do not log I/O related to the peer handshake'),
4316 _(b'do not log I/O related to the peer handshake'),
4313 ),
4317 ),
4314 ]
4318 ]
4315 + cmdutil.remoteopts,
4319 + cmdutil.remoteopts,
4316 _(b'[PATH]'),
4320 _(b'[PATH]'),
4317 optionalrepo=True,
4321 optionalrepo=True,
4318 )
4322 )
4319 def debugwireproto(ui, repo, path=None, **opts):
4323 def debugwireproto(ui, repo, path=None, **opts):
4320 """send wire protocol commands to a server
4324 """send wire protocol commands to a server
4321
4325
4322 This command can be used to issue wire protocol commands to remote
4326 This command can be used to issue wire protocol commands to remote
4323 peers and to debug the raw data being exchanged.
4327 peers and to debug the raw data being exchanged.
4324
4328
4325 ``--localssh`` will start an SSH server against the current repository
4329 ``--localssh`` will start an SSH server against the current repository
4326 and connect to that. By default, the connection will perform a handshake
4330 and connect to that. By default, the connection will perform a handshake
4327 and establish an appropriate peer instance.
4331 and establish an appropriate peer instance.
4328
4332
4329 ``--peer`` can be used to bypass the handshake protocol and construct a
4333 ``--peer`` can be used to bypass the handshake protocol and construct a
4330 peer instance using the specified class type. Valid values are ``raw``,
4334 peer instance using the specified class type. Valid values are ``raw``,
4331 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4335 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4332 don't support higher-level command actions.
4336 don't support higher-level command actions.
4333
4337
4334 ``--noreadstderr`` can be used to disable automatic reading from stderr
4338 ``--noreadstderr`` can be used to disable automatic reading from stderr
4335 of the peer (for SSH connections only). Disabling automatic reading of
4339 of the peer (for SSH connections only). Disabling automatic reading of
4336 stderr is useful for making output more deterministic.
4340 stderr is useful for making output more deterministic.
4337
4341
4338 Commands are issued via a mini language which is specified via stdin.
4342 Commands are issued via a mini language which is specified via stdin.
4339 The language consists of individual actions to perform. An action is
4343 The language consists of individual actions to perform. An action is
4340 defined by a block. A block is defined as a line with no leading
4344 defined by a block. A block is defined as a line with no leading
4341 space followed by 0 or more lines with leading space. Blocks are
4345 space followed by 0 or more lines with leading space. Blocks are
4342 effectively a high-level command with additional metadata.
4346 effectively a high-level command with additional metadata.
4343
4347
4344 Lines beginning with ``#`` are ignored.
4348 Lines beginning with ``#`` are ignored.
4345
4349
4346 The following sections denote available actions.
4350 The following sections denote available actions.
4347
4351
4348 raw
4352 raw
4349 ---
4353 ---
4350
4354
4351 Send raw data to the server.
4355 Send raw data to the server.
4352
4356
4353 The block payload contains the raw data to send as one atomic send
4357 The block payload contains the raw data to send as one atomic send
4354 operation. The data may not actually be delivered in a single system
4358 operation. The data may not actually be delivered in a single system
4355 call: it depends on the abilities of the transport being used.
4359 call: it depends on the abilities of the transport being used.
4356
4360
4357 Each line in the block is de-indented and concatenated. Then, that
4361 Each line in the block is de-indented and concatenated. Then, that
4358 value is evaluated as a Python b'' literal. This allows the use of
4362 value is evaluated as a Python b'' literal. This allows the use of
4359 backslash escaping, etc.
4363 backslash escaping, etc.
4360
4364
4361 raw+
4365 raw+
4362 ----
4366 ----
4363
4367
4364 Behaves like ``raw`` except flushes output afterwards.
4368 Behaves like ``raw`` except flushes output afterwards.
4365
4369
4366 command <X>
4370 command <X>
4367 -----------
4371 -----------
4368
4372
4369 Send a request to run a named command, whose name follows the ``command``
4373 Send a request to run a named command, whose name follows the ``command``
4370 string.
4374 string.
4371
4375
4372 Arguments to the command are defined as lines in this block. The format of
4376 Arguments to the command are defined as lines in this block. The format of
4373 each line is ``<key> <value>``. e.g.::
4377 each line is ``<key> <value>``. e.g.::
4374
4378
4375 command listkeys
4379 command listkeys
4376 namespace bookmarks
4380 namespace bookmarks
4377
4381
4378 If the value begins with ``eval:``, it will be interpreted as a Python
4382 If the value begins with ``eval:``, it will be interpreted as a Python
4379 literal expression. Otherwise values are interpreted as Python b'' literals.
4383 literal expression. Otherwise values are interpreted as Python b'' literals.
4380 This allows sending complex types and encoding special byte sequences via
4384 This allows sending complex types and encoding special byte sequences via
4381 backslash escaping.
4385 backslash escaping.
4382
4386
4383 The following arguments have special meaning:
4387 The following arguments have special meaning:
4384
4388
4385 ``PUSHFILE``
4389 ``PUSHFILE``
4386 When defined, the *push* mechanism of the peer will be used instead
4390 When defined, the *push* mechanism of the peer will be used instead
4387 of the static request-response mechanism and the content of the
4391 of the static request-response mechanism and the content of the
4388 file specified in the value of this argument will be sent as the
4392 file specified in the value of this argument will be sent as the
4389 command payload.
4393 command payload.
4390
4394
4391 This can be used to submit a local bundle file to the remote.
4395 This can be used to submit a local bundle file to the remote.
4392
4396
4393 batchbegin
4397 batchbegin
4394 ----------
4398 ----------
4395
4399
4396 Instruct the peer to begin a batched send.
4400 Instruct the peer to begin a batched send.
4397
4401
4398 All ``command`` blocks are queued for execution until the next
4402 All ``command`` blocks are queued for execution until the next
4399 ``batchsubmit`` block.
4403 ``batchsubmit`` block.
4400
4404
4401 batchsubmit
4405 batchsubmit
4402 -----------
4406 -----------
4403
4407
4404 Submit previously queued ``command`` blocks as a batch request.
4408 Submit previously queued ``command`` blocks as a batch request.
4405
4409
4406 This action MUST be paired with a ``batchbegin`` action.
4410 This action MUST be paired with a ``batchbegin`` action.
4407
4411
4408 httprequest <method> <path>
4412 httprequest <method> <path>
4409 ---------------------------
4413 ---------------------------
4410
4414
4411 (HTTP peer only)
4415 (HTTP peer only)
4412
4416
4413 Send an HTTP request to the peer.
4417 Send an HTTP request to the peer.
4414
4418
4415 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4419 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4416
4420
4417 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4421 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4418 headers to add to the request. e.g. ``Accept: foo``.
4422 headers to add to the request. e.g. ``Accept: foo``.
4419
4423
4420 The following arguments are special:
4424 The following arguments are special:
4421
4425
4422 ``BODYFILE``
4426 ``BODYFILE``
4423 The content of the file defined as the value to this argument will be
4427 The content of the file defined as the value to this argument will be
4424 transferred verbatim as the HTTP request body.
4428 transferred verbatim as the HTTP request body.
4425
4429
4426 ``frame <type> <flags> <payload>``
4430 ``frame <type> <flags> <payload>``
4427 Send a unified protocol frame as part of the request body.
4431 Send a unified protocol frame as part of the request body.
4428
4432
4429 All frames will be collected and sent as the body to the HTTP
4433 All frames will be collected and sent as the body to the HTTP
4430 request.
4434 request.
4431
4435
4432 close
4436 close
4433 -----
4437 -----
4434
4438
4435 Close the connection to the server.
4439 Close the connection to the server.
4436
4440
4437 flush
4441 flush
4438 -----
4442 -----
4439
4443
4440 Flush data written to the server.
4444 Flush data written to the server.
4441
4445
4442 readavailable
4446 readavailable
4443 -------------
4447 -------------
4444
4448
4445 Close the write end of the connection and read all available data from
4449 Close the write end of the connection and read all available data from
4446 the server.
4450 the server.
4447
4451
4448 If the connection to the server encompasses multiple pipes, we poll both
4452 If the connection to the server encompasses multiple pipes, we poll both
4449 pipes and read available data.
4453 pipes and read available data.
4450
4454
4451 readline
4455 readline
4452 --------
4456 --------
4453
4457
4454 Read a line of output from the server. If there are multiple output
4458 Read a line of output from the server. If there are multiple output
4455 pipes, reads only the main pipe.
4459 pipes, reads only the main pipe.
4456
4460
4457 ereadline
4461 ereadline
4458 ---------
4462 ---------
4459
4463
4460 Like ``readline``, but read from the stderr pipe, if available.
4464 Like ``readline``, but read from the stderr pipe, if available.
4461
4465
4462 read <X>
4466 read <X>
4463 --------
4467 --------
4464
4468
4465 ``read()`` N bytes from the server's main output pipe.
4469 ``read()`` N bytes from the server's main output pipe.
4466
4470
4467 eread <X>
4471 eread <X>
4468 ---------
4472 ---------
4469
4473
4470 ``read()`` N bytes from the server's stderr pipe, if available.
4474 ``read()`` N bytes from the server's stderr pipe, if available.
4471
4475
4472 Specifying Unified Frame-Based Protocol Frames
4476 Specifying Unified Frame-Based Protocol Frames
4473 ----------------------------------------------
4477 ----------------------------------------------
4474
4478
4475 It is possible to emit a *Unified Frame-Based Protocol* by using special
4479 It is possible to emit a *Unified Frame-Based Protocol* by using special
4476 syntax.
4480 syntax.
4477
4481
4478 A frame is composed as a type, flags, and payload. These can be parsed
4482 A frame is composed as a type, flags, and payload. These can be parsed
4479 from a string of the form:
4483 from a string of the form:
4480
4484
4481 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4485 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4482
4486
4483 ``request-id`` and ``stream-id`` are integers defining the request and
4487 ``request-id`` and ``stream-id`` are integers defining the request and
4484 stream identifiers.
4488 stream identifiers.
4485
4489
4486 ``type`` can be an integer value for the frame type or the string name
4490 ``type`` can be an integer value for the frame type or the string name
4487 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4491 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4488 ``command-name``.
4492 ``command-name``.
4489
4493
4490 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4494 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4491 components. Each component (and there can be just one) can be an integer
4495 components. Each component (and there can be just one) can be an integer
4492 or a flag name for stream flags or frame flags, respectively. Values are
4496 or a flag name for stream flags or frame flags, respectively. Values are
4493 resolved to integers and then bitwise OR'd together.
4497 resolved to integers and then bitwise OR'd together.
4494
4498
4495 ``payload`` represents the raw frame payload. If it begins with
4499 ``payload`` represents the raw frame payload. If it begins with
4496 ``cbor:``, the following string is evaluated as Python code and the
4500 ``cbor:``, the following string is evaluated as Python code and the
4497 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4501 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4498 as a Python byte string literal.
4502 as a Python byte string literal.
4499 """
4503 """
4500 opts = pycompat.byteskwargs(opts)
4504 opts = pycompat.byteskwargs(opts)
4501
4505
4502 if opts[b'localssh'] and not repo:
4506 if opts[b'localssh'] and not repo:
4503 raise error.Abort(_(b'--localssh requires a repository'))
4507 raise error.Abort(_(b'--localssh requires a repository'))
4504
4508
4505 if opts[b'peer'] and opts[b'peer'] not in (
4509 if opts[b'peer'] and opts[b'peer'] not in (
4506 b'raw',
4510 b'raw',
4507 b'ssh1',
4511 b'ssh1',
4508 ):
4512 ):
4509 raise error.Abort(
4513 raise error.Abort(
4510 _(b'invalid value for --peer'),
4514 _(b'invalid value for --peer'),
4511 hint=_(b'valid values are "raw" and "ssh1"'),
4515 hint=_(b'valid values are "raw" and "ssh1"'),
4512 )
4516 )
4513
4517
4514 if path and opts[b'localssh']:
4518 if path and opts[b'localssh']:
4515 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4519 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4516
4520
4517 if ui.interactive():
4521 if ui.interactive():
4518 ui.write(_(b'(waiting for commands on stdin)\n'))
4522 ui.write(_(b'(waiting for commands on stdin)\n'))
4519
4523
4520 blocks = list(_parsewirelangblocks(ui.fin))
4524 blocks = list(_parsewirelangblocks(ui.fin))
4521
4525
4522 proc = None
4526 proc = None
4523 stdin = None
4527 stdin = None
4524 stdout = None
4528 stdout = None
4525 stderr = None
4529 stderr = None
4526 opener = None
4530 opener = None
4527
4531
4528 if opts[b'localssh']:
4532 if opts[b'localssh']:
4529 # We start the SSH server in its own process so there is process
4533 # We start the SSH server in its own process so there is process
4530 # separation. This prevents a whole class of potential bugs around
4534 # separation. This prevents a whole class of potential bugs around
4531 # shared state from interfering with server operation.
4535 # shared state from interfering with server operation.
4532 args = procutil.hgcmd() + [
4536 args = procutil.hgcmd() + [
4533 b'-R',
4537 b'-R',
4534 repo.root,
4538 repo.root,
4535 b'debugserve',
4539 b'debugserve',
4536 b'--sshstdio',
4540 b'--sshstdio',
4537 ]
4541 ]
4538 proc = subprocess.Popen(
4542 proc = subprocess.Popen(
4539 pycompat.rapply(procutil.tonativestr, args),
4543 pycompat.rapply(procutil.tonativestr, args),
4540 stdin=subprocess.PIPE,
4544 stdin=subprocess.PIPE,
4541 stdout=subprocess.PIPE,
4545 stdout=subprocess.PIPE,
4542 stderr=subprocess.PIPE,
4546 stderr=subprocess.PIPE,
4543 bufsize=0,
4547 bufsize=0,
4544 )
4548 )
4545
4549
4546 stdin = proc.stdin
4550 stdin = proc.stdin
4547 stdout = proc.stdout
4551 stdout = proc.stdout
4548 stderr = proc.stderr
4552 stderr = proc.stderr
4549
4553
4550 # We turn the pipes into observers so we can log I/O.
4554 # We turn the pipes into observers so we can log I/O.
4551 if ui.verbose or opts[b'peer'] == b'raw':
4555 if ui.verbose or opts[b'peer'] == b'raw':
4552 stdin = util.makeloggingfileobject(
4556 stdin = util.makeloggingfileobject(
4553 ui, proc.stdin, b'i', logdata=True
4557 ui, proc.stdin, b'i', logdata=True
4554 )
4558 )
4555 stdout = util.makeloggingfileobject(
4559 stdout = util.makeloggingfileobject(
4556 ui, proc.stdout, b'o', logdata=True
4560 ui, proc.stdout, b'o', logdata=True
4557 )
4561 )
4558 stderr = util.makeloggingfileobject(
4562 stderr = util.makeloggingfileobject(
4559 ui, proc.stderr, b'e', logdata=True
4563 ui, proc.stderr, b'e', logdata=True
4560 )
4564 )
4561
4565
4562 # --localssh also implies the peer connection settings.
4566 # --localssh also implies the peer connection settings.
4563
4567
4564 url = b'ssh://localserver'
4568 url = b'ssh://localserver'
4565 autoreadstderr = not opts[b'noreadstderr']
4569 autoreadstderr = not opts[b'noreadstderr']
4566
4570
4567 if opts[b'peer'] == b'ssh1':
4571 if opts[b'peer'] == b'ssh1':
4568 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4572 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4569 peer = sshpeer.sshv1peer(
4573 peer = sshpeer.sshv1peer(
4570 ui,
4574 ui,
4571 url,
4575 url,
4572 proc,
4576 proc,
4573 stdin,
4577 stdin,
4574 stdout,
4578 stdout,
4575 stderr,
4579 stderr,
4576 None,
4580 None,
4577 autoreadstderr=autoreadstderr,
4581 autoreadstderr=autoreadstderr,
4578 )
4582 )
4579 elif opts[b'peer'] == b'raw':
4583 elif opts[b'peer'] == b'raw':
4580 ui.write(_(b'using raw connection to peer\n'))
4584 ui.write(_(b'using raw connection to peer\n'))
4581 peer = None
4585 peer = None
4582 else:
4586 else:
4583 ui.write(_(b'creating ssh peer from handshake results\n'))
4587 ui.write(_(b'creating ssh peer from handshake results\n'))
4584 peer = sshpeer._make_peer(
4588 peer = sshpeer._make_peer(
4585 ui,
4589 ui,
4586 url,
4590 url,
4587 proc,
4591 proc,
4588 stdin,
4592 stdin,
4589 stdout,
4593 stdout,
4590 stderr,
4594 stderr,
4591 autoreadstderr=autoreadstderr,
4595 autoreadstderr=autoreadstderr,
4592 )
4596 )
4593
4597
4594 elif path:
4598 elif path:
4595 # We bypass hg.peer() so we can proxy the sockets.
4599 # We bypass hg.peer() so we can proxy the sockets.
4596 # TODO consider not doing this because we skip
4600 # TODO consider not doing this because we skip
4597 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4601 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4598 u = urlutil.url(path)
4602 u = urlutil.url(path)
4599 if u.scheme != b'http':
4603 if u.scheme != b'http':
4600 raise error.Abort(_(b'only http:// paths are currently supported'))
4604 raise error.Abort(_(b'only http:// paths are currently supported'))
4601
4605
4602 url, authinfo = u.authinfo()
4606 url, authinfo = u.authinfo()
4603 openerargs = {
4607 openerargs = {
4604 'useragent': b'Mercurial debugwireproto',
4608 'useragent': b'Mercurial debugwireproto',
4605 }
4609 }
4606
4610
4607 # Turn pipes/sockets into observers so we can log I/O.
4611 # Turn pipes/sockets into observers so we can log I/O.
4608 if ui.verbose:
4612 if ui.verbose:
4609 openerargs.update(
4613 openerargs.update(
4610 {
4614 {
4611 'loggingfh': ui,
4615 'loggingfh': ui,
4612 'loggingname': b's',
4616 'loggingname': b's',
4613 'loggingopts': {
4617 'loggingopts': {
4614 'logdata': True,
4618 'logdata': True,
4615 'logdataapis': False,
4619 'logdataapis': False,
4616 },
4620 },
4617 }
4621 }
4618 )
4622 )
4619
4623
4620 if ui.debugflag:
4624 if ui.debugflag:
4621 openerargs['loggingopts']['logdataapis'] = True
4625 openerargs['loggingopts']['logdataapis'] = True
4622
4626
4623 # Don't send default headers when in raw mode. This allows us to
4627 # Don't send default headers when in raw mode. This allows us to
4624 # bypass most of the behavior of our URL handling code so we can
4628 # bypass most of the behavior of our URL handling code so we can
4625 # have near complete control over what's sent on the wire.
4629 # have near complete control over what's sent on the wire.
4626 if opts[b'peer'] == b'raw':
4630 if opts[b'peer'] == b'raw':
4627 openerargs['sendaccept'] = False
4631 openerargs['sendaccept'] = False
4628
4632
4629 opener = urlmod.opener(ui, authinfo, **openerargs)
4633 opener = urlmod.opener(ui, authinfo, **openerargs)
4630
4634
4631 if opts[b'peer'] == b'raw':
4635 if opts[b'peer'] == b'raw':
4632 ui.write(_(b'using raw connection to peer\n'))
4636 ui.write(_(b'using raw connection to peer\n'))
4633 peer = None
4637 peer = None
4634 elif opts[b'peer']:
4638 elif opts[b'peer']:
4635 raise error.Abort(
4639 raise error.Abort(
4636 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4640 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4637 )
4641 )
4638 else:
4642 else:
4639 peer_path = urlutil.try_path(ui, path)
4643 peer_path = urlutil.try_path(ui, path)
4640 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4644 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4641
4645
4642 # We /could/ populate stdin/stdout with sock.makefile()...
4646 # We /could/ populate stdin/stdout with sock.makefile()...
4643 else:
4647 else:
4644 raise error.Abort(_(b'unsupported connection configuration'))
4648 raise error.Abort(_(b'unsupported connection configuration'))
4645
4649
4646 batchedcommands = None
4650 batchedcommands = None
4647
4651
4648 # Now perform actions based on the parsed wire language instructions.
4652 # Now perform actions based on the parsed wire language instructions.
4649 for action, lines in blocks:
4653 for action, lines in blocks:
4650 if action in (b'raw', b'raw+'):
4654 if action in (b'raw', b'raw+'):
4651 if not stdin:
4655 if not stdin:
4652 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4656 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4653
4657
4654 # Concatenate the data together.
4658 # Concatenate the data together.
4655 data = b''.join(l.lstrip() for l in lines)
4659 data = b''.join(l.lstrip() for l in lines)
4656 data = stringutil.unescapestr(data)
4660 data = stringutil.unescapestr(data)
4657 stdin.write(data)
4661 stdin.write(data)
4658
4662
4659 if action == b'raw+':
4663 if action == b'raw+':
4660 stdin.flush()
4664 stdin.flush()
4661 elif action == b'flush':
4665 elif action == b'flush':
4662 if not stdin:
4666 if not stdin:
4663 raise error.Abort(_(b'cannot call flush on this peer'))
4667 raise error.Abort(_(b'cannot call flush on this peer'))
4664 stdin.flush()
4668 stdin.flush()
4665 elif action.startswith(b'command'):
4669 elif action.startswith(b'command'):
4666 if not peer:
4670 if not peer:
4667 raise error.Abort(
4671 raise error.Abort(
4668 _(
4672 _(
4669 b'cannot send commands unless peer instance '
4673 b'cannot send commands unless peer instance '
4670 b'is available'
4674 b'is available'
4671 )
4675 )
4672 )
4676 )
4673
4677
4674 command = action.split(b' ', 1)[1]
4678 command = action.split(b' ', 1)[1]
4675
4679
4676 args = {}
4680 args = {}
4677 for line in lines:
4681 for line in lines:
4678 # We need to allow empty values.
4682 # We need to allow empty values.
4679 fields = line.lstrip().split(b' ', 1)
4683 fields = line.lstrip().split(b' ', 1)
4680 if len(fields) == 1:
4684 if len(fields) == 1:
4681 key = fields[0]
4685 key = fields[0]
4682 value = b''
4686 value = b''
4683 else:
4687 else:
4684 key, value = fields
4688 key, value = fields
4685
4689
4686 if value.startswith(b'eval:'):
4690 if value.startswith(b'eval:'):
4687 value = stringutil.evalpythonliteral(value[5:])
4691 value = stringutil.evalpythonliteral(value[5:])
4688 else:
4692 else:
4689 value = stringutil.unescapestr(value)
4693 value = stringutil.unescapestr(value)
4690
4694
4691 args[key] = value
4695 args[key] = value
4692
4696
4693 if batchedcommands is not None:
4697 if batchedcommands is not None:
4694 batchedcommands.append((command, args))
4698 batchedcommands.append((command, args))
4695 continue
4699 continue
4696
4700
4697 ui.status(_(b'sending %s command\n') % command)
4701 ui.status(_(b'sending %s command\n') % command)
4698
4702
4699 if b'PUSHFILE' in args:
4703 if b'PUSHFILE' in args:
4700 with open(args[b'PUSHFILE'], 'rb') as fh:
4704 with open(args[b'PUSHFILE'], 'rb') as fh:
4701 del args[b'PUSHFILE']
4705 del args[b'PUSHFILE']
4702 res, output = peer._callpush(
4706 res, output = peer._callpush(
4703 command, fh, **pycompat.strkwargs(args)
4707 command, fh, **pycompat.strkwargs(args)
4704 )
4708 )
4705 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4709 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4706 ui.status(
4710 ui.status(
4707 _(b'remote output: %s\n') % stringutil.escapestr(output)
4711 _(b'remote output: %s\n') % stringutil.escapestr(output)
4708 )
4712 )
4709 else:
4713 else:
4710 with peer.commandexecutor() as e:
4714 with peer.commandexecutor() as e:
4711 res = e.callcommand(command, args).result()
4715 res = e.callcommand(command, args).result()
4712
4716
4713 ui.status(
4717 ui.status(
4714 _(b'response: %s\n')
4718 _(b'response: %s\n')
4715 % stringutil.pprint(res, bprefix=True, indent=2)
4719 % stringutil.pprint(res, bprefix=True, indent=2)
4716 )
4720 )
4717
4721
4718 elif action == b'batchbegin':
4722 elif action == b'batchbegin':
4719 if batchedcommands is not None:
4723 if batchedcommands is not None:
4720 raise error.Abort(_(b'nested batchbegin not allowed'))
4724 raise error.Abort(_(b'nested batchbegin not allowed'))
4721
4725
4722 batchedcommands = []
4726 batchedcommands = []
4723 elif action == b'batchsubmit':
4727 elif action == b'batchsubmit':
4724 # There is a batching API we could go through. But it would be
4728 # There is a batching API we could go through. But it would be
4725 # difficult to normalize requests into function calls. It is easier
4729 # difficult to normalize requests into function calls. It is easier
4726 # to bypass this layer and normalize to commands + args.
4730 # to bypass this layer and normalize to commands + args.
4727 ui.status(
4731 ui.status(
4728 _(b'sending batch with %d sub-commands\n')
4732 _(b'sending batch with %d sub-commands\n')
4729 % len(batchedcommands)
4733 % len(batchedcommands)
4730 )
4734 )
4731 assert peer is not None
4735 assert peer is not None
4732 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4736 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4733 ui.status(
4737 ui.status(
4734 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4738 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4735 )
4739 )
4736
4740
4737 batchedcommands = None
4741 batchedcommands = None
4738
4742
4739 elif action.startswith(b'httprequest '):
4743 elif action.startswith(b'httprequest '):
4740 if not opener:
4744 if not opener:
4741 raise error.Abort(
4745 raise error.Abort(
4742 _(b'cannot use httprequest without an HTTP peer')
4746 _(b'cannot use httprequest without an HTTP peer')
4743 )
4747 )
4744
4748
4745 request = action.split(b' ', 2)
4749 request = action.split(b' ', 2)
4746 if len(request) != 3:
4750 if len(request) != 3:
4747 raise error.Abort(
4751 raise error.Abort(
4748 _(
4752 _(
4749 b'invalid httprequest: expected format is '
4753 b'invalid httprequest: expected format is '
4750 b'"httprequest <method> <path>'
4754 b'"httprequest <method> <path>'
4751 )
4755 )
4752 )
4756 )
4753
4757
4754 method, httppath = request[1:]
4758 method, httppath = request[1:]
4755 headers = {}
4759 headers = {}
4756 body = None
4760 body = None
4757 frames = []
4761 frames = []
4758 for line in lines:
4762 for line in lines:
4759 line = line.lstrip()
4763 line = line.lstrip()
4760 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4764 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4761 if m:
4765 if m:
4762 # Headers need to use native strings.
4766 # Headers need to use native strings.
4763 key = pycompat.strurl(m.group(1))
4767 key = pycompat.strurl(m.group(1))
4764 value = pycompat.strurl(m.group(2))
4768 value = pycompat.strurl(m.group(2))
4765 headers[key] = value
4769 headers[key] = value
4766 continue
4770 continue
4767
4771
4768 if line.startswith(b'BODYFILE '):
4772 if line.startswith(b'BODYFILE '):
4769 with open(line.split(b' ', 1), b'rb') as fh:
4773 with open(line.split(b' ', 1), b'rb') as fh:
4770 body = fh.read()
4774 body = fh.read()
4771 elif line.startswith(b'frame '):
4775 elif line.startswith(b'frame '):
4772 frame = wireprotoframing.makeframefromhumanstring(
4776 frame = wireprotoframing.makeframefromhumanstring(
4773 line[len(b'frame ') :]
4777 line[len(b'frame ') :]
4774 )
4778 )
4775
4779
4776 frames.append(frame)
4780 frames.append(frame)
4777 else:
4781 else:
4778 raise error.Abort(
4782 raise error.Abort(
4779 _(b'unknown argument to httprequest: %s') % line
4783 _(b'unknown argument to httprequest: %s') % line
4780 )
4784 )
4781
4785
4782 url = path + httppath
4786 url = path + httppath
4783
4787
4784 if frames:
4788 if frames:
4785 body = b''.join(bytes(f) for f in frames)
4789 body = b''.join(bytes(f) for f in frames)
4786
4790
4787 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4791 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4788
4792
4789 # urllib.Request insists on using has_data() as a proxy for
4793 # urllib.Request insists on using has_data() as a proxy for
4790 # determining the request method. Override that to use our
4794 # determining the request method. Override that to use our
4791 # explicitly requested method.
4795 # explicitly requested method.
4792 req.get_method = lambda: pycompat.sysstr(method)
4796 req.get_method = lambda: pycompat.sysstr(method)
4793
4797
4794 try:
4798 try:
4795 res = opener.open(req)
4799 res = opener.open(req)
4796 body = res.read()
4800 body = res.read()
4797 except util.urlerr.urlerror as e:
4801 except util.urlerr.urlerror as e:
4798 # read() method must be called, but only exists in Python 2
4802 # read() method must be called, but only exists in Python 2
4799 getattr(e, 'read', lambda: None)()
4803 getattr(e, 'read', lambda: None)()
4800 continue
4804 continue
4801
4805
4802 ct = res.headers.get('Content-Type')
4806 ct = res.headers.get('Content-Type')
4803 if ct == 'application/mercurial-cbor':
4807 if ct == 'application/mercurial-cbor':
4804 ui.write(
4808 ui.write(
4805 _(b'cbor> %s\n')
4809 _(b'cbor> %s\n')
4806 % stringutil.pprint(
4810 % stringutil.pprint(
4807 cborutil.decodeall(body), bprefix=True, indent=2
4811 cborutil.decodeall(body), bprefix=True, indent=2
4808 )
4812 )
4809 )
4813 )
4810
4814
4811 elif action == b'close':
4815 elif action == b'close':
4812 assert peer is not None
4816 assert peer is not None
4813 peer.close()
4817 peer.close()
4814 elif action == b'readavailable':
4818 elif action == b'readavailable':
4815 if not stdout or not stderr:
4819 if not stdout or not stderr:
4816 raise error.Abort(
4820 raise error.Abort(
4817 _(b'readavailable not available on this peer')
4821 _(b'readavailable not available on this peer')
4818 )
4822 )
4819
4823
4820 stdin.close()
4824 stdin.close()
4821 stdout.read()
4825 stdout.read()
4822 stderr.read()
4826 stderr.read()
4823
4827
4824 elif action == b'readline':
4828 elif action == b'readline':
4825 if not stdout:
4829 if not stdout:
4826 raise error.Abort(_(b'readline not available on this peer'))
4830 raise error.Abort(_(b'readline not available on this peer'))
4827 stdout.readline()
4831 stdout.readline()
4828 elif action == b'ereadline':
4832 elif action == b'ereadline':
4829 if not stderr:
4833 if not stderr:
4830 raise error.Abort(_(b'ereadline not available on this peer'))
4834 raise error.Abort(_(b'ereadline not available on this peer'))
4831 stderr.readline()
4835 stderr.readline()
4832 elif action.startswith(b'read '):
4836 elif action.startswith(b'read '):
4833 count = int(action.split(b' ', 1)[1])
4837 count = int(action.split(b' ', 1)[1])
4834 if not stdout:
4838 if not stdout:
4835 raise error.Abort(_(b'read not available on this peer'))
4839 raise error.Abort(_(b'read not available on this peer'))
4836 stdout.read(count)
4840 stdout.read(count)
4837 elif action.startswith(b'eread '):
4841 elif action.startswith(b'eread '):
4838 count = int(action.split(b' ', 1)[1])
4842 count = int(action.split(b' ', 1)[1])
4839 if not stderr:
4843 if not stderr:
4840 raise error.Abort(_(b'eread not available on this peer'))
4844 raise error.Abort(_(b'eread not available on this peer'))
4841 stderr.read(count)
4845 stderr.read(count)
4842 else:
4846 else:
4843 raise error.Abort(_(b'unknown action: %s') % action)
4847 raise error.Abort(_(b'unknown action: %s') % action)
4844
4848
4845 if batchedcommands is not None:
4849 if batchedcommands is not None:
4846 raise error.Abort(_(b'unclosed "batchbegin" request'))
4850 raise error.Abort(_(b'unclosed "batchbegin" request'))
4847
4851
4848 if peer:
4852 if peer:
4849 peer.close()
4853 peer.close()
4850
4854
4851 if proc:
4855 if proc:
4852 proc.kill()
4856 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now