##// END OF EJS Templates
debugdiscovery: fix a typo in the help...
marmoute -
r49810:db960032 default
parent child Browse files
Show More
@@ -1,4883 +1,4883
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 repoview,
72 repoview,
73 requirements,
73 requirements,
74 revlog,
74 revlog,
75 revset,
75 revset,
76 revsetlang,
76 revsetlang,
77 scmutil,
77 scmutil,
78 setdiscovery,
78 setdiscovery,
79 simplemerge,
79 simplemerge,
80 sshpeer,
80 sshpeer,
81 sslutil,
81 sslutil,
82 streamclone,
82 streamclone,
83 strip,
83 strip,
84 tags as tagsmod,
84 tags as tagsmod,
85 templater,
85 templater,
86 treediscovery,
86 treediscovery,
87 upgrade,
87 upgrade,
88 url as urlmod,
88 url as urlmod,
89 util,
89 util,
90 vfs as vfsmod,
90 vfs as vfsmod,
91 wireprotoframing,
91 wireprotoframing,
92 wireprotoserver,
92 wireprotoserver,
93 )
93 )
94 from .interfaces import repository
94 from .interfaces import repository
95 from .utils import (
95 from .utils import (
96 cborutil,
96 cborutil,
97 compression,
97 compression,
98 dateutil,
98 dateutil,
99 procutil,
99 procutil,
100 stringutil,
100 stringutil,
101 urlutil,
101 urlutil,
102 )
102 )
103
103
104 from .revlogutils import (
104 from .revlogutils import (
105 deltas as deltautil,
105 deltas as deltautil,
106 nodemap,
106 nodemap,
107 rewrite,
107 rewrite,
108 sidedata,
108 sidedata,
109 )
109 )
110
110
# Convenience alias so debug commands can release lock lists in one call.
release = lockmod.release

# Command table for all debug* commands defined in this module.  The
# strip extension's commands are folded in so they register alongside.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
116
116
117
117
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index

    With three arguments, the first is an explicit revlog index file and
    the two revisions are looked up in that revlog (no repository needed).
    With two arguments, the revisions are resolved against the current
    repository's changelog.
    """
    if len(args) == 3:
        # Explicit index file: open it as a standalone revlog rooted at cwd.
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        # Repository mode requires an actual repo (command is optionalrepo).
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
137
137
138
138
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active

    Writes the standard EICAR test file into the repository cache, waits
    briefly, then removes it.  An active on-access scanner will typically
    quarantine or delete the file, making its presence observable.
    """
    # NOTE(review): the path is a native str while vfs paths are normally
    # bytes in this codebase — confirm cachevfs tolerates str here.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
154
154
155
155
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file

    Opens ``fname`` (local path or URL via hg.openpath), parses it as a
    bundle and applies it to the current repository.
    """
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
162
162
163
163
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to clobber an existing history unless explicitly allowed.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev so three-way merges have content
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass actually creates the commits, under the locks and
    # a single transaction.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently committed node
        atbranch = b'default'
        nodeids = []  # node ids indexed by DAG id, for backref resolution
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the shared file.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Touch this rev's own line so every rev changes the file.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # Same file rewritten wholesale at every rev.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # A fresh file per rev; merges also carry over the
                    # other parent's nf* files so they survive the merge.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve content for files we generated.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG backrefs into actual parent node ids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag for the preceding node; written out at the end.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
349
349
350
350
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup ``gen`` to the ui.

    With ``all`` set, every delta of every section (changelog, manifest,
    filelogs) is printed in full; otherwise only changelog node hashes
    are listed.  ``indent`` prefixes each output line with that many
    spaces (used when nested inside bundle2 output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one section header followed by one line per delta.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # Sections must be consumed in stream order: changelog, manifest,
        # then one filelog per file until the empty terminator.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
390
390
391
391
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'

    Reads an obsmarkers bundle2 ``part``, prints the encoding version and
    payload size, then each decoded marker via the formatter.  Unknown
    marker versions are reported instead of raising.
    """
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        # Sort for deterministic output regardless of marker storage order.
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
414
414
415
415
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'

    Decodes a binary phase-heads payload and prints one ``<hex> <phase>``
    line per head, indented by ``indent`` spaces.  (The previous docstring
    was copy-pasted from the obsmarkers dumper and was inaccurate.)
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
424
424
425
425
def _quasirepr(thing):
    """Return a deterministic, repr-like bytes rendering of *thing*.

    Mapping types are rendered with keys in sorted order so the output is
    stable across runs; any other value falls back to ``repr()``.
    """
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mapping_types):
        return pycompat.bytestr(repr(thing))
    rendered = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(rendered)
432
432
433
433
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2

    Prints the stream parameters, then one line per part (optionally
    filtered by the ``part-type`` option).  Known part payloads
    (changegroup, obsmarkers, phase-heads) are expanded inline, indented,
    unless the ui is quiet.
    """
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # Default to cg1 when the part does not advertise a version.
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
456
456
457
457
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle

    With --spec, only the bundlespec string is printed.  Otherwise the
    bundle is parsed and dispatched to the bundle2 or changegroup dumper
    depending on its format.
    """
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
480
480
481
481
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer

    Connects to the peer at ``path`` and prints its wire-protocol
    capabilities, followed by its bundle2 capabilities (key and values)
    if it advertises any.  The peer is always closed on the way out.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        peer.close()
501
501
502
502
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored file changes for a revision

    For each touched file, prints its action (added/removed/merged/
    salvaged/touched) and, when applicable, which parent it was copied
    from and the copy source.  By default the data is read from the
    changelog's sidedata; --compute recomputes it from the revision
    contents instead.
    """
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        # No sidedata files block means nothing is printed at all.
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Actions are mutually exclusive; checked in priority order.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
552
552
553
553
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Verifies the dirstate against the manifests of both parents; each
    discrepancy is printed as a warning, and the command aborts if any
    were found.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for err in repo.dirstate.verify(m1, m2):
        # err is a (format, *args) tuple produced by dirstate.verify().
        ui.warn(err[0] % err[1:])
        errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
567
567
568
568
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style

    Prints the active color mode, then either the configured styles
    (--style) or the raw colors/effects available in this terminal.
    """
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)
581
581
582
582
def _debugdisplaycolor(ui):
    """Print every color/effect name, each rendered in its own effect.

    Works on a copy of the ui with the style table replaced by an
    identity mapping so each name is labeled with itself.
    """
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, user-defined color./terminfo. config entries
        # are additional effects; strip the prefix for the label.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
599
599
600
600
def _debugdisplaystyle(ui):
    """Print each configured style label with its effects, both rendered.

    Labels are padded to the width of the longest one so the effect
    columns line up.
    """
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # pad after the colon so effect lists start in the same column
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
614
614
615
615
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlogs verbatim, so phase boundaries cannot
        # be honored: warn rather than silently leaking secret changesets.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
637
637
638
638
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit revlog index file: emit its DAG, labeling any revision
        # numbers listed on the command line as "rN".
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # 'n' events carry (rev, [parent revs]); nullrev parents (-1)
            # are filtered out. 'l' events attach a label to a revision.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to all of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an 'a' annotation event whenever the branch
                    # name changes along the walk.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
708
708
709
709
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the storage is implied, so the sole positional
        # argument is actually the revision, not a file.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata: dump the stored bytes without applying flag processors.
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
725
725
726
726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    # d is the parsed (unixtime, tzoffset) pair.
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        # Optional second argument: report whether the parsed date falls
        # inside the given date range.
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
745
745
746
746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify how the delta for `rev` was computed and total up the
        # compressed size of its whole delta chain.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; compare it against the parents
            # (e[5]/e[6]) and the adjacent revision to name the delta type.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, deltas are always against the previous
            # revision unless the revision is a full snapshot.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Number chains by first appearance of their base revision.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # On-disk span from the chain base to the end of this revision;
        # anything beyond chainsize is unrelated data we seek across.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # With sparse reads the chain is fetched in several slices;
            # measure how much would actually be read from disk.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
927
927
928
928
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is deprecated in favor of --dates=no; honor both.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set: show 'lnk' instead of a permission triplet
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
983
983
984
984
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v2
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The ignore-pattern hash is stored as the trailing bytes of the
        # tree metadata blob.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
999
999
1000
1000
1001 @command(
1001 @command(
1002 b'debugdiscovery',
1002 b'debugdiscovery',
1003 [
1003 [
1004 (b'', b'old', None, _(b'use old-style discovery')),
1004 (b'', b'old', None, _(b'use old-style discovery')),
1005 (
1005 (
1006 b'',
1006 b'',
1007 b'nonheads',
1007 b'nonheads',
1008 None,
1008 None,
1009 _(b'use old-style discovery with non-heads included'),
1009 _(b'use old-style discovery with non-heads included'),
1010 ),
1010 ),
1011 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1011 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1012 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1012 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1013 (
1013 (
1014 b'',
1014 b'',
1015 b'local-as-revs',
1015 b'local-as-revs',
1016 b"",
1016 b"",
1017 b'treat local has having these revisions only',
1017 b'treat local has having these revisions only',
1018 ),
1018 ),
1019 (
1019 (
1020 b'',
1020 b'',
1021 b'remote-as-revs',
1021 b'remote-as-revs',
1022 b"",
1022 b"",
1023 b'use local as remote, with only these these revisions',
1023 b'use local as remote, with only these revisions',
1024 ),
1024 ),
1025 ]
1025 ]
1026 + cmdutil.remoteopts
1026 + cmdutil.remoteopts
1027 + cmdutil.formatteropts,
1027 + cmdutil.formatteropts,
1028 _(b'[--rev REV] [OTHER]'),
1028 _(b'[--rev REV] [OTHER]'),
1029 )
1029 )
1030 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1030 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1031 """runs the changeset discovery protocol in isolation
1031 """runs the changeset discovery protocol in isolation
1032
1032
1033 The local peer can be "replaced" by a subset of the local repository by
1033 The local peer can be "replaced" by a subset of the local repository by
1034 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1034 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1035 be "replaced" by a subset of the local repository using the
1035 be "replaced" by a subset of the local repository using the
1036 `--local-as-revs` flag. This is useful to efficiently debug pathological
1036 `--local-as-revs` flag. This is useful to efficiently debug pathological
1037 discovery situation.
1037 discovery situation.
1038
1038
1039 The following developer oriented config are relevant for people playing with this command:
1039 The following developer oriented config are relevant for people playing with this command:
1040
1040
1041 * devel.discovery.exchange-heads=True
1041 * devel.discovery.exchange-heads=True
1042
1042
1043 If False, the discovery will not start with
1043 If False, the discovery will not start with
1044 remote head fetching and local head querying.
1044 remote head fetching and local head querying.
1045
1045
1046 * devel.discovery.grow-sample=True
1046 * devel.discovery.grow-sample=True
1047
1047
1048 If False, the sample size used in set discovery will not be increased
1048 If False, the sample size used in set discovery will not be increased
1049 through the process
1049 through the process
1050
1050
1051 * devel.discovery.grow-sample.dynamic=True
1051 * devel.discovery.grow-sample.dynamic=True
1052
1052
1053 When discovery.grow-sample.dynamic is True, the default, the sample size is
1053 When discovery.grow-sample.dynamic is True, the default, the sample size is
1054 adapted to the shape of the undecided set (it is set to the max of:
1054 adapted to the shape of the undecided set (it is set to the max of:
1055 <target-size>, len(roots(undecided)), len(heads(undecided)
1055 <target-size>, len(roots(undecided)), len(heads(undecided)
1056
1056
1057 * devel.discovery.grow-sample.rate=1.05
1057 * devel.discovery.grow-sample.rate=1.05
1058
1058
1059 the rate at which the sample grow
1059 the rate at which the sample grow
1060
1060
1061 * devel.discovery.randomize=True
1061 * devel.discovery.randomize=True
1062
1062
1063 If andom sampling during discovery are deterministic. It is meant for
1063 If andom sampling during discovery are deterministic. It is meant for
1064 integration tests.
1064 integration tests.
1065
1065
1066 * devel.discovery.sample-size=200
1066 * devel.discovery.sample-size=200
1067
1067
1068 Control the initial size of the discovery sample
1068 Control the initial size of the discovery sample
1069
1069
1070 * devel.discovery.sample-size.initial=100
1070 * devel.discovery.sample-size.initial=100
1071
1071
1072 Control the initial size of the discovery for initial change
1072 Control the initial size of the discovery for initial change
1073 """
1073 """
1074 opts = pycompat.byteskwargs(opts)
1074 opts = pycompat.byteskwargs(opts)
1075 unfi = repo.unfiltered()
1075 unfi = repo.unfiltered()
1076
1076
1077 # setup potential extra filtering
1077 # setup potential extra filtering
1078 local_revs = opts[b"local_as_revs"]
1078 local_revs = opts[b"local_as_revs"]
1079 remote_revs = opts[b"remote_as_revs"]
1079 remote_revs = opts[b"remote_as_revs"]
1080
1080
1081 # make sure tests are repeatable
1081 # make sure tests are repeatable
1082 random.seed(int(opts[b'seed']))
1082 random.seed(int(opts[b'seed']))
1083
1083
1084 if not remote_revs:
1084 if not remote_revs:
1085
1085
1086 remoteurl, branches = urlutil.get_unique_pull_path(
1086 remoteurl, branches = urlutil.get_unique_pull_path(
1087 b'debugdiscovery', repo, ui, remoteurl
1087 b'debugdiscovery', repo, ui, remoteurl
1088 )
1088 )
1089 remote = hg.peer(repo, opts, remoteurl)
1089 remote = hg.peer(repo, opts, remoteurl)
1090 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1090 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1091 else:
1091 else:
1092 branches = (None, [])
1092 branches = (None, [])
1093 remote_filtered_revs = logcmdutil.revrange(
1093 remote_filtered_revs = logcmdutil.revrange(
1094 unfi, [b"not (::(%s))" % remote_revs]
1094 unfi, [b"not (::(%s))" % remote_revs]
1095 )
1095 )
1096 remote_filtered_revs = frozenset(remote_filtered_revs)
1096 remote_filtered_revs = frozenset(remote_filtered_revs)
1097
1097
1098 def remote_func(x):
1098 def remote_func(x):
1099 return remote_filtered_revs
1099 return remote_filtered_revs
1100
1100
1101 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1101 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1102
1102
1103 remote = repo.peer()
1103 remote = repo.peer()
1104 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1104 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1105
1105
1106 if local_revs:
1106 if local_revs:
1107 local_filtered_revs = logcmdutil.revrange(
1107 local_filtered_revs = logcmdutil.revrange(
1108 unfi, [b"not (::(%s))" % local_revs]
1108 unfi, [b"not (::(%s))" % local_revs]
1109 )
1109 )
1110 local_filtered_revs = frozenset(local_filtered_revs)
1110 local_filtered_revs = frozenset(local_filtered_revs)
1111
1111
1112 def local_func(x):
1112 def local_func(x):
1113 return local_filtered_revs
1113 return local_filtered_revs
1114
1114
1115 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1115 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1116 repo = repo.filtered(b'debug-discovery-local-filter')
1116 repo = repo.filtered(b'debug-discovery-local-filter')
1117
1117
1118 data = {}
1118 data = {}
1119 if opts.get(b'old'):
1119 if opts.get(b'old'):
1120
1120
1121 def doit(pushedrevs, remoteheads, remote=remote):
1121 def doit(pushedrevs, remoteheads, remote=remote):
1122 if not util.safehasattr(remote, b'branches'):
1122 if not util.safehasattr(remote, b'branches'):
1123 # enable in-client legacy support
1123 # enable in-client legacy support
1124 remote = localrepo.locallegacypeer(remote.local())
1124 remote = localrepo.locallegacypeer(remote.local())
1125 common, _in, hds = treediscovery.findcommonincoming(
1125 common, _in, hds = treediscovery.findcommonincoming(
1126 repo, remote, force=True, audit=data
1126 repo, remote, force=True, audit=data
1127 )
1127 )
1128 common = set(common)
1128 common = set(common)
1129 if not opts.get(b'nonheads'):
1129 if not opts.get(b'nonheads'):
1130 ui.writenoi18n(
1130 ui.writenoi18n(
1131 b"unpruned common: %s\n"
1131 b"unpruned common: %s\n"
1132 % b" ".join(sorted(short(n) for n in common))
1132 % b" ".join(sorted(short(n) for n in common))
1133 )
1133 )
1134
1134
1135 clnode = repo.changelog.node
1135 clnode = repo.changelog.node
1136 common = repo.revs(b'heads(::%ln)', common)
1136 common = repo.revs(b'heads(::%ln)', common)
1137 common = {clnode(r) for r in common}
1137 common = {clnode(r) for r in common}
1138 return common, hds
1138 return common, hds
1139
1139
1140 else:
1140 else:
1141
1141
1142 def doit(pushedrevs, remoteheads, remote=remote):
1142 def doit(pushedrevs, remoteheads, remote=remote):
1143 nodes = None
1143 nodes = None
1144 if pushedrevs:
1144 if pushedrevs:
1145 revs = logcmdutil.revrange(repo, pushedrevs)
1145 revs = logcmdutil.revrange(repo, pushedrevs)
1146 nodes = [repo[r].node() for r in revs]
1146 nodes = [repo[r].node() for r in revs]
1147 common, any, hds = setdiscovery.findcommonheads(
1147 common, any, hds = setdiscovery.findcommonheads(
1148 ui, repo, remote, ancestorsof=nodes, audit=data
1148 ui, repo, remote, ancestorsof=nodes, audit=data
1149 )
1149 )
1150 return common, hds
1150 return common, hds
1151
1151
1152 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1152 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1153 localrevs = opts[b'rev']
1153 localrevs = opts[b'rev']
1154
1154
1155 fm = ui.formatter(b'debugdiscovery', opts)
1155 fm = ui.formatter(b'debugdiscovery', opts)
1156 if fm.strict_format:
1156 if fm.strict_format:
1157
1157
1158 @contextlib.contextmanager
1158 @contextlib.contextmanager
1159 def may_capture_output():
1159 def may_capture_output():
1160 ui.pushbuffer()
1160 ui.pushbuffer()
1161 yield
1161 yield
1162 data[b'output'] = ui.popbuffer()
1162 data[b'output'] = ui.popbuffer()
1163
1163
1164 else:
1164 else:
1165 may_capture_output = util.nullcontextmanager
1165 may_capture_output = util.nullcontextmanager
1166 with may_capture_output():
1166 with may_capture_output():
1167 with util.timedcm('debug-discovery') as t:
1167 with util.timedcm('debug-discovery') as t:
1168 common, hds = doit(localrevs, remoterevs)
1168 common, hds = doit(localrevs, remoterevs)
1169
1169
1170 # compute all statistics
1170 # compute all statistics
1171 heads_common = set(common)
1171 heads_common = set(common)
1172 heads_remote = set(hds)
1172 heads_remote = set(hds)
1173 heads_local = set(repo.heads())
1173 heads_local = set(repo.heads())
1174 # note: they cannot be a local or remote head that is in common and not
1174 # note: they cannot be a local or remote head that is in common and not
1175 # itself a head of common.
1175 # itself a head of common.
1176 heads_common_local = heads_common & heads_local
1176 heads_common_local = heads_common & heads_local
1177 heads_common_remote = heads_common & heads_remote
1177 heads_common_remote = heads_common & heads_remote
1178 heads_common_both = heads_common & heads_remote & heads_local
1178 heads_common_both = heads_common & heads_remote & heads_local
1179
1179
1180 all = repo.revs(b'all()')
1180 all = repo.revs(b'all()')
1181 common = repo.revs(b'::%ln', common)
1181 common = repo.revs(b'::%ln', common)
1182 roots_common = repo.revs(b'roots(::%ld)', common)
1182 roots_common = repo.revs(b'roots(::%ld)', common)
1183 missing = repo.revs(b'not ::%ld', common)
1183 missing = repo.revs(b'not ::%ld', common)
1184 heads_missing = repo.revs(b'heads(%ld)', missing)
1184 heads_missing = repo.revs(b'heads(%ld)', missing)
1185 roots_missing = repo.revs(b'roots(%ld)', missing)
1185 roots_missing = repo.revs(b'roots(%ld)', missing)
1186 assert len(common) + len(missing) == len(all)
1186 assert len(common) + len(missing) == len(all)
1187
1187
1188 initial_undecided = repo.revs(
1188 initial_undecided = repo.revs(
1189 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1189 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1190 )
1190 )
1191 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1191 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1192 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1192 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1193 common_initial_undecided = initial_undecided & common
1193 common_initial_undecided = initial_undecided & common
1194 missing_initial_undecided = initial_undecided & missing
1194 missing_initial_undecided = initial_undecided & missing
1195
1195
1196 data[b'elapsed'] = t.elapsed
1196 data[b'elapsed'] = t.elapsed
1197 data[b'nb-common-heads'] = len(heads_common)
1197 data[b'nb-common-heads'] = len(heads_common)
1198 data[b'nb-common-heads-local'] = len(heads_common_local)
1198 data[b'nb-common-heads-local'] = len(heads_common_local)
1199 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1199 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1200 data[b'nb-common-heads-both'] = len(heads_common_both)
1200 data[b'nb-common-heads-both'] = len(heads_common_both)
1201 data[b'nb-common-roots'] = len(roots_common)
1201 data[b'nb-common-roots'] = len(roots_common)
1202 data[b'nb-head-local'] = len(heads_local)
1202 data[b'nb-head-local'] = len(heads_local)
1203 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1203 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1204 data[b'nb-head-remote'] = len(heads_remote)
1204 data[b'nb-head-remote'] = len(heads_remote)
1205 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1205 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1206 heads_common_remote
1206 heads_common_remote
1207 )
1207 )
1208 data[b'nb-revs'] = len(all)
1208 data[b'nb-revs'] = len(all)
1209 data[b'nb-revs-common'] = len(common)
1209 data[b'nb-revs-common'] = len(common)
1210 data[b'nb-revs-missing'] = len(missing)
1210 data[b'nb-revs-missing'] = len(missing)
1211 data[b'nb-missing-heads'] = len(heads_missing)
1211 data[b'nb-missing-heads'] = len(heads_missing)
1212 data[b'nb-missing-roots'] = len(roots_missing)
1212 data[b'nb-missing-roots'] = len(roots_missing)
1213 data[b'nb-ini_und'] = len(initial_undecided)
1213 data[b'nb-ini_und'] = len(initial_undecided)
1214 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1214 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1215 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1215 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1216 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1216 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1217 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1217 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1218
1218
1219 fm.startitem()
1219 fm.startitem()
1220 fm.data(**pycompat.strkwargs(data))
1220 fm.data(**pycompat.strkwargs(data))
1221 # display discovery summary
1221 # display discovery summary
1222 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1222 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1223 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1223 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1224 fm.plain(b"heads summary:\n")
1224 fm.plain(b"heads summary:\n")
1225 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1225 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1226 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1226 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1227 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1227 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1228 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1228 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1229 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1229 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1230 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1230 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1231 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1231 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1232 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1232 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1233 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1233 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1234 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1234 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1235 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1235 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1236 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1236 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1237 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1237 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1238 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1238 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1239 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1239 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1240 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1240 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1241 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1241 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1242 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1242 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1243 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1243 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1244 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1244 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1245 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1245 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1246 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1246 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1247
1247
1248 if ui.verbose:
1248 if ui.verbose:
1249 fm.plain(
1249 fm.plain(
1250 b"common heads: %s\n"
1250 b"common heads: %s\n"
1251 % b" ".join(sorted(short(n) for n in heads_common))
1251 % b" ".join(sorted(short(n) for n in heads_common))
1252 )
1252 )
1253 fm.end()
1253 fm.end()
1254
1254
1255
1255
# Default read/write chunk size (4 KiB) used when streaming data,
# e.g. by debugdownload below.
_chunksize = 4 << 10
1257
1257
1258
1258
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at ``url`` is opened through Mercurial's url handling
    (so proxy/auth configuration applies) and streamed in ``_chunksize``
    pieces either to the ui (default) or to the file named by
    ``--output``.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Always release the source handle (previously it was leaked),
        # and close the output file when we opened one.  The ui object
        # itself must never be closed.
        fh.close()
        if output:
            dest.close()
1281
1281
1282
1282
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # one formatter item per extension, sorted by extension name
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (oxidized) builds have no __file__; point at the binary
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with a compatibility hint in default mode
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        # the remaining fields are only rendered with --verbose
        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1344
1344
1345
1345
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # pipeline of tree transformations; each stage name can be requested
    # with --show-stage to dump the tree at that point
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # suppress the stage header for the deprecated --verbose path
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # --show-matcher defaults to None, so plain --verbose also dumps it
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1441
1441
1442
1442
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report is a pure analysis pass, so it cannot be combined with
    # repairing (--from-report) or with --dry-run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair logic lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1515
1515
1516
1516
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the first column: longest variant name, at least as wide
    # as the literal b'format-variant' header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # format string padding the name column to maxvariantlength
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values pass through; booleans become yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json, template, ...) keep raw values
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so color output can highlight mismatches between
        # repo state, configuration, and the Mercurial default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1587
1587
1588
1588
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    # all report lines are raw (untranslated) output
    write = ui.writenoi18n

    def yesno(flag):
        # render a boolean probe result as it appears in the report
        return b'yes' if flag else b'no'

    write(b'path: %s\n' % path)
    write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    write(b'exec: %s\n' % yesno(util.checkexec(path)))
    write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    write(b'symlink: %s\n' % yesno(util.checklink(path)))
    write(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        # probing can fail (e.g. directory not writable); keep b'(unknown)'
        pass
    write(b'case-sensitive: %s\n' % casesensitive)
1611
1611
1612
1612
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name to the internal bundle type
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1659
1659
1660
1660
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No file arguments: just dump the combined ignore matcher.
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # The file itself is not ignored; check whether one of its
                # containing directories is.
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break

        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue

        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1709
1709
1710
1710
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # Full node ids in debug mode, short ones otherwise.
    shortfn = hex if ui.debugflag else short

    # Probe the first revision to size the node columns; default to 12.
    idlen = 12
    for probe in store:
        idlen = len(shortfn(store.node(probe)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1750
1750
1751
1751
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        # Emit one edge per (real) parent, pointing parent -> child.
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1770
1770
1771
1771
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index before querying it; the return value is unused
    # (presumably this forces the native index to be loaded — TODO confirm).
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1781
1781
1782
1782
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Checks the local installation: encoding, Python interpreter and
    library, TLS/SNI support, Rust and C extension modules, compression
    engines, templates, commit editor and username.  One formatter field
    is emitted per check; loaded extensions may contribute additional
    checks via a ``debuginstall(ui, fm)`` handler.

    Returns 0 on success (i.e. the number of problems found).
    """
    opts = pycompat.byteskwargs(opts)

    # Count of detected problems; this is also the command's return value.
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # Locate the Python standard library.  In an "oxidized" (PyOxidizer)
    # build, os has no __file__, so fall back to the executable path.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # Detect the Rust extension module; missing is not an error.
    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    # The part before the first '+' is the release; anything after it marks
    # a custom/local build.
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # Only try importing the accelerated modules that the module policy
    # says should be present; pure-Python installs skip this entirely.
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    # Compression engines: registered, locally usable, and wire-protocol
    # capable (server role) are reported separately.
    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    # p doubles as the "templates are healthy" flag: it is cleared if the
    # default map file is missing or fails to parse.
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    # 'vi' is the built-in fallback, so a missing 'vi' gets a gentler
    # message than a missing explicitly-configured editor.
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # Let loaded extensions run their own installation checks.
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2081
2081
2082
2082
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # One '1'/'0' character per queried id, in input order.
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
2096
2096
2097
2097
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # Pure alias: forward everything to the modern implementation.
    debugnamecomplete(ui, repo, *args)
2102
2102
2103
2103
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: unconditionally delete the lock file(s) and stop.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) non-blockingly, hold them
    # until the prompt is answered (or interrupted), then release.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        # Always release whatever was acquired, even on interruption.
        release(*locks)

    # Report mode (default): describe each lock and count how many are held.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Probe the lock by trying a non-blocking acquire and report its
        # state; returns 1 if the lock is held by someone else, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We got the lock, so nobody else held it; give it back.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock files record the holder as "host:pid".
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # A missing lock file just means the lock was released in
                # the meantime; anything else is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2215
2215
2216
2216
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Not every revlog implementation carries a fulltext cache; abort
        # with a clear message instead of an AttributeError traceback.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            getcache().clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            mlog = repo.manifestlog
            store = mlog.getstorage(b'')
            for node in add:
                try:
                    manifest = mlog[store.lookup(node)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                # reading the manifest also stores its revision in the cache
                manifest.read()
        return

    # Neither --clear nor --add: display the current cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return

    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # cache.peek does not update the LRU order, so listing the cache
        # does not perturb it
        data = cache.peek(nodeid)
        size = len(data)
        totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2290
2290
2291
2291
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if v1records or v2records:
            if not v2records:
                ui.writenoi18n(b'no version 2 merge state\n')
            elif ms._v1v2match(v1records, v2records):
                ui.writenoi18n(b'v1 and v2 states match: using v2\n')
            else:
                ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable rendering of the merge state
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for path in ms:
            fm_files.startitem()
            fm_files.data(path=path)
            entry = ms._state[path]
            fm_files.data(state=entry[0])
            if entry[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # content-merge record: local/ancestor/other sides
                fm_files.data(local_key=entry[1])
                fm_files.data(local_path=entry[2])
                fm_files.data(ancestor_path=entry[3])
                fm_files.data(ancestor_node=entry[4])
                fm_files.data(other_path=entry[5])
                fm_files.data(other_node=entry[6])
                fm_files.data(local_flags=entry[7])
            elif entry[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # path-conflict record
                fm_files.data(renamed_path=entry[1])
                fm_files.data(rename_side=entry[2])
            fm_extras = fm_files.nested(b'extras')
            for key, value in sorted(ms.extras(path).items()):
                fm_extras.startitem()
                fm_extras.data(key=key)
                fm_extras.data(value=value)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for path, extradict in sorted(ms.allextras().items()):
        if path in ms:
            # files present in the mergestate had their extras emitted above
            continue
        for key, value in extradict.items():
            fm_extras.startitem()
            fm_extras.data(file=path)
            fm_extras.data(key=key)
            fm_extras.data(value=value)
    fm_extras.end()

    fm.end()
2399
2399
2400
2400
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # the 'branches' namespace would include closed branches; historically
    # only open branches were listed, so those are gathered separately below
    for nsname, ns in repo.names.items():
        if nsname != b'branches':
            names.update(ns.listnames(repo))
    names.update(
        branch
        for (branch, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    prefixes = args or [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(name for name in names if name.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2423
2423
2424
2424
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            # fixed help typo: was "the data on disk data are correct"
            _(b'check that the data on disk are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # serialize a fresh nodemap from the current changelog index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # some index implementations can produce the serialized form
            # themselves
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # dump the raw bytes currently persisted on disk (if any)
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # validate the persisted nodemap against the changelog index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # display the docket (metadata header) of the persisted nodemap
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2486
2486
2487
2487
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            node = bin(s)
            if len(node) != repo.nodeconstants.nodelen:
                raise TypeError()
            return node
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # --delete mode: remove the markers at the given indices
        indices = []
        for raw in opts.get(b'delete'):
            try:
                indices.append(int(raw))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % raw,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            count = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % count)

        return

    if precursor is not None:
        # creation mode: record a marker obsoleting `precursor`
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {
            b'user': encoding.fromlocal(opts[b'user'] or ui.username()),
        }
        succs = tuple(parsenodeid(succ) for succ in successors)
        lock = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            lock.release()
    else:
        # display mode
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        markerset = None  # None means every iterated marker is displayed
        if opts.get(b'rev') and opts.get(b'index'):
            # indices are positions in the full marker list, so iterate over
            # everything but only display the markers selected by --rev
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)

        fm = ui.formatter(b'debugobsolete', opts)
        for idx, marker in enumerate(markerstoiter):
            if markerset is not None and marker not in markerset:
                # iterating over the full set (for correct indices) but this
                # marker is not relevant to the requested --rev value
                continue
            fm.startitem()
            index = idx if opts.get(b'index') else None
            cmdutil.showmarker(fm, marker, index=index)
        fm.end()
2637
2637
2638
2638
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    rev = pycompat.byteskwargs(opts).get(b'rev')
    ctx = scmutil.revsingle(repo, rev, default=None)
    for dest, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2651
2651
2652
2652
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # NOTE: this function was previously (and wrongly) also named
    # `debugp1copies`, shadowing the real debugp1copies at module level;
    # the registered command name was always b'debugp2copies', so only the
    # Python-level name changes here.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2665
2665
2666
2666
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # only complete paths inside the repository
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # on platforms with a non-'/' separator, dirstate paths use '/'
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        for fname, entry in dirstate.items():
            if not (fname.startswith(spec) and entry.state in acceptable):
                continue
            if fixpaths:
                fname = fname.replace(b'/', pycompat.ossep)
            if fullpaths:
                files.add(fname)
                continue
            sep = fname.find(pycompat.ossep, speclen)
            if sep >= 0:
                # truncate at the next path segment
                dirs.add(fname[:sep])
            else:
                files.add(fname)
        return files, dirs

    # build the set of acceptable dirstate states from the flags
    acceptable = b''
    for flag, states in (
        ('normal', b'nm'),
        ('added', b'a'),
        ('removed', b'r'),
    ):
        if opts[flag]:
            acceptable += states
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2735
2735
2736
2736
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    from_ctx = scmutil.revsingle(repo, rev1)
    to_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(from_ctx, pats, opts)
    copy_map = copies.pathcopies(from_ctx, to_ctx, matcher)
    for dest, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2750
2750
2751
2751
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        can_push = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if can_push else _(b'no'))
        )
    finally:
        peer.close()
2775
2775
2776
2776
2777 @command(
2777 @command(
2778 b'debugpickmergetool',
2778 b'debugpickmergetool',
2779 [
2779 [
2780 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2780 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2781 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2781 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2782 ]
2782 ]
2783 + cmdutil.walkopts
2783 + cmdutil.walkopts
2784 + cmdutil.mergetoolopts,
2784 + cmdutil.mergetoolopts,
2785 _(b'[PATTERN]...'),
2785 _(b'[PATTERN]...'),
2786 inferrepo=True,
2786 inferrepo=True,
2787 )
2787 )
2788 def debugpickmergetool(ui, repo, *pats, **opts):
2788 def debugpickmergetool(ui, repo, *pats, **opts):
2789 """examine which merge tool is chosen for specified file
2789 """examine which merge tool is chosen for specified file
2790
2790
2791 As described in :hg:`help merge-tools`, Mercurial examines
2791 As described in :hg:`help merge-tools`, Mercurial examines
2792 configurations below in this order to decide which merge tool is
2792 configurations below in this order to decide which merge tool is
2793 chosen for specified file.
2793 chosen for specified file.
2794
2794
2795 1. ``--tool`` option
2795 1. ``--tool`` option
2796 2. ``HGMERGE`` environment variable
2796 2. ``HGMERGE`` environment variable
2797 3. configurations in ``merge-patterns`` section
2797 3. configurations in ``merge-patterns`` section
2798 4. configuration of ``ui.merge``
2798 4. configuration of ``ui.merge``
2799 5. configurations in ``merge-tools`` section
2799 5. configurations in ``merge-tools`` section
2800 6. ``hgmerge`` tool (for historical reason only)
2800 6. ``hgmerge`` tool (for historical reason only)
2801 7. default tool for fallback (``:merge`` or ``:prompt``)
2801 7. default tool for fallback (``:merge`` or ``:prompt``)
2802
2802
2803 This command writes out examination result in the style below::
2803 This command writes out examination result in the style below::
2804
2804
2805 FILE = MERGETOOL
2805 FILE = MERGETOOL
2806
2806
2807 By default, all files known in the first parent context of the
2807 By default, all files known in the first parent context of the
2808 working directory are examined. Use file patterns and/or -I/-X
2808 working directory are examined. Use file patterns and/or -I/-X
2809 options to limit target files. -r/--rev is also useful to examine
2809 options to limit target files. -r/--rev is also useful to examine
2810 files in another context without actual updating to it.
2810 files in another context without actual updating to it.
2811
2811
2812 With --debug, this command shows warning messages while matching
2812 With --debug, this command shows warning messages while matching
2813 against ``merge-patterns`` and so on, too. It is recommended to
2813 against ``merge-patterns`` and so on, too. It is recommended to
2814 use this option with explicit file patterns and/or -I/-X options,
2814 use this option with explicit file patterns and/or -I/-X options,
2815 because this option increases amount of output per file according
2815 because this option increases amount of output per file according
2816 to configurations in hgrc.
2816 to configurations in hgrc.
2817
2817
2818 With -v/--verbose, this command shows configurations below at
2818 With -v/--verbose, this command shows configurations below at
2819 first (only if specified).
2819 first (only if specified).
2820
2820
2821 - ``--tool`` option
2821 - ``--tool`` option
2822 - ``HGMERGE`` environment variable
2822 - ``HGMERGE`` environment variable
2823 - configuration of ``ui.merge``
2823 - configuration of ``ui.merge``
2824
2824
2825 If merge tool is chosen before matching against
2825 If merge tool is chosen before matching against
2826 ``merge-patterns``, this command can't show any helpful
2826 ``merge-patterns``, this command can't show any helpful
2827 information, even with --debug. In such case, information above is
2827 information, even with --debug. In such case, information above is
2828 useful to know why a merge tool is chosen.
2828 useful to know why a merge tool is chosen.
2829 """
2829 """
2830 opts = pycompat.byteskwargs(opts)
2830 opts = pycompat.byteskwargs(opts)
2831 overrides = {}
2831 overrides = {}
2832 if opts[b'tool']:
2832 if opts[b'tool']:
2833 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2833 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2834 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2834 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2835
2835
2836 with ui.configoverride(overrides, b'debugmergepatterns'):
2836 with ui.configoverride(overrides, b'debugmergepatterns'):
2837 hgmerge = encoding.environ.get(b"HGMERGE")
2837 hgmerge = encoding.environ.get(b"HGMERGE")
2838 if hgmerge is not None:
2838 if hgmerge is not None:
2839 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2839 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2840 uimerge = ui.config(b"ui", b"merge")
2840 uimerge = ui.config(b"ui", b"merge")
2841 if uimerge:
2841 if uimerge:
2842 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2842 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2843
2843
2844 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2844 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2845 m = scmutil.match(ctx, pats, opts)
2845 m = scmutil.match(ctx, pats, opts)
2846 changedelete = opts[b'changedelete']
2846 changedelete = opts[b'changedelete']
2847 for path in ctx.walk(m):
2847 for path in ctx.walk(m):
2848 fctx = ctx[path]
2848 fctx = ctx[path]
2849 with ui.silent(
2849 with ui.silent(
2850 error=True
2850 error=True
2851 ) if not ui.debugflag else util.nullcontextmanager():
2851 ) if not ui.debugflag else util.nullcontextmanager():
2852 tool, toolpath = filemerge._picktool(
2852 tool, toolpath = filemerge._picktool(
2853 repo,
2853 repo,
2854 ui,
2854 ui,
2855 path,
2855 path,
2856 fctx.isbinary(),
2856 fctx.isbinary(),
2857 b'l' in fctx.flags(),
2857 b'l' in fctx.flags(),
2858 changedelete,
2858 changedelete,
2859 )
2859 )
2860 ui.write(b'%s = %s\n' % (path, tool))
2860 ui.write(b'%s = %s\n' % (path, tool))
2861
2861
2862
2862
2863 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2863 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2864 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2864 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2865 """access the pushkey key/value protocol
2865 """access the pushkey key/value protocol
2866
2866
2867 With two args, list the keys in the given namespace.
2867 With two args, list the keys in the given namespace.
2868
2868
2869 With five args, set a key to new if it currently is set to old.
2869 With five args, set a key to new if it currently is set to old.
2870 Reports success or failure.
2870 Reports success or failure.
2871 """
2871 """
2872
2872
2873 target = hg.peer(ui, {}, repopath)
2873 target = hg.peer(ui, {}, repopath)
2874 try:
2874 try:
2875 if keyinfo:
2875 if keyinfo:
2876 key, old, new = keyinfo
2876 key, old, new = keyinfo
2877 with target.commandexecutor() as e:
2877 with target.commandexecutor() as e:
2878 r = e.callcommand(
2878 r = e.callcommand(
2879 b'pushkey',
2879 b'pushkey',
2880 {
2880 {
2881 b'namespace': namespace,
2881 b'namespace': namespace,
2882 b'key': key,
2882 b'key': key,
2883 b'old': old,
2883 b'old': old,
2884 b'new': new,
2884 b'new': new,
2885 },
2885 },
2886 ).result()
2886 ).result()
2887
2887
2888 ui.status(pycompat.bytestr(r) + b'\n')
2888 ui.status(pycompat.bytestr(r) + b'\n')
2889 return not r
2889 return not r
2890 else:
2890 else:
2891 for k, v in sorted(target.listkeys(namespace).items()):
2891 for k, v in sorted(target.listkeys(namespace).items()):
2892 ui.write(
2892 ui.write(
2893 b"%s\t%s\n"
2893 b"%s\t%s\n"
2894 % (stringutil.escapestr(k), stringutil.escapestr(v))
2894 % (stringutil.escapestr(k), stringutil.escapestr(v))
2895 )
2895 )
2896 finally:
2896 finally:
2897 target.close()
2897 target.close()
2898
2898
2899
2899
2900 @command(b'debugpvec', [], _(b'A B'))
2900 @command(b'debugpvec', [], _(b'A B'))
2901 def debugpvec(ui, repo, a, b=None):
2901 def debugpvec(ui, repo, a, b=None):
2902 ca = scmutil.revsingle(repo, a)
2902 ca = scmutil.revsingle(repo, a)
2903 cb = scmutil.revsingle(repo, b)
2903 cb = scmutil.revsingle(repo, b)
2904 pa = pvec.ctxpvec(ca)
2904 pa = pvec.ctxpvec(ca)
2905 pb = pvec.ctxpvec(cb)
2905 pb = pvec.ctxpvec(cb)
2906 if pa == pb:
2906 if pa == pb:
2907 rel = b"="
2907 rel = b"="
2908 elif pa > pb:
2908 elif pa > pb:
2909 rel = b">"
2909 rel = b">"
2910 elif pa < pb:
2910 elif pa < pb:
2911 rel = b"<"
2911 rel = b"<"
2912 elif pa | pb:
2912 elif pa | pb:
2913 rel = b"|"
2913 rel = b"|"
2914 ui.write(_(b"a: %s\n") % pa)
2914 ui.write(_(b"a: %s\n") % pa)
2915 ui.write(_(b"b: %s\n") % pb)
2915 ui.write(_(b"b: %s\n") % pb)
2916 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2916 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2917 ui.write(
2917 ui.write(
2918 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2918 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2919 % (
2919 % (
2920 abs(pa._depth - pb._depth),
2920 abs(pa._depth - pb._depth),
2921 pvec._hamming(pa._vec, pb._vec),
2921 pvec._hamming(pa._vec, pb._vec),
2922 pa.distance(pb),
2922 pa.distance(pb),
2923 rel,
2923 rel,
2924 )
2924 )
2925 )
2925 )
2926
2926
2927
2927
2928 @command(
2928 @command(
2929 b'debugrebuilddirstate|debugrebuildstate',
2929 b'debugrebuilddirstate|debugrebuildstate',
2930 [
2930 [
2931 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2931 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2932 (
2932 (
2933 b'',
2933 b'',
2934 b'minimal',
2934 b'minimal',
2935 None,
2935 None,
2936 _(
2936 _(
2937 b'only rebuild files that are inconsistent with '
2937 b'only rebuild files that are inconsistent with '
2938 b'the working copy parent'
2938 b'the working copy parent'
2939 ),
2939 ),
2940 ),
2940 ),
2941 ],
2941 ],
2942 _(b'[-r REV]'),
2942 _(b'[-r REV]'),
2943 )
2943 )
2944 def debugrebuilddirstate(ui, repo, rev, **opts):
2944 def debugrebuilddirstate(ui, repo, rev, **opts):
2945 """rebuild the dirstate as it would look like for the given revision
2945 """rebuild the dirstate as it would look like for the given revision
2946
2946
2947 If no revision is specified the first current parent will be used.
2947 If no revision is specified the first current parent will be used.
2948
2948
2949 The dirstate will be set to the files of the given revision.
2949 The dirstate will be set to the files of the given revision.
2950 The actual working directory content or existing dirstate
2950 The actual working directory content or existing dirstate
2951 information such as adds or removes is not considered.
2951 information such as adds or removes is not considered.
2952
2952
2953 ``minimal`` will only rebuild the dirstate status for files that claim to be
2953 ``minimal`` will only rebuild the dirstate status for files that claim to be
2954 tracked but are not in the parent manifest, or that exist in the parent
2954 tracked but are not in the parent manifest, or that exist in the parent
2955 manifest but are not in the dirstate. It will not change adds, removes, or
2955 manifest but are not in the dirstate. It will not change adds, removes, or
2956 modified files that are in the working copy parent.
2956 modified files that are in the working copy parent.
2957
2957
2958 One use of this command is to make the next :hg:`status` invocation
2958 One use of this command is to make the next :hg:`status` invocation
2959 check the actual file content.
2959 check the actual file content.
2960 """
2960 """
2961 ctx = scmutil.revsingle(repo, rev)
2961 ctx = scmutil.revsingle(repo, rev)
2962 with repo.wlock():
2962 with repo.wlock():
2963 dirstate = repo.dirstate
2963 dirstate = repo.dirstate
2964 changedfiles = None
2964 changedfiles = None
2965 # See command doc for what minimal does.
2965 # See command doc for what minimal does.
2966 if opts.get('minimal'):
2966 if opts.get('minimal'):
2967 manifestfiles = set(ctx.manifest().keys())
2967 manifestfiles = set(ctx.manifest().keys())
2968 dirstatefiles = set(dirstate)
2968 dirstatefiles = set(dirstate)
2969 manifestonly = manifestfiles - dirstatefiles
2969 manifestonly = manifestfiles - dirstatefiles
2970 dsonly = dirstatefiles - manifestfiles
2970 dsonly = dirstatefiles - manifestfiles
2971 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
2971 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
2972 changedfiles = manifestonly | dsnotadded
2972 changedfiles = manifestonly | dsnotadded
2973
2973
2974 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2974 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2975
2975
2976
2976
2977 @command(
2977 @command(
2978 b'debugrebuildfncache',
2978 b'debugrebuildfncache',
2979 [
2979 [
2980 (
2980 (
2981 b'',
2981 b'',
2982 b'only-data',
2982 b'only-data',
2983 False,
2983 False,
2984 _(b'only look for wrong .d files (much faster)'),
2984 _(b'only look for wrong .d files (much faster)'),
2985 )
2985 )
2986 ],
2986 ],
2987 b'',
2987 b'',
2988 )
2988 )
2989 def debugrebuildfncache(ui, repo, **opts):
2989 def debugrebuildfncache(ui, repo, **opts):
2990 """rebuild the fncache file"""
2990 """rebuild the fncache file"""
2991 opts = pycompat.byteskwargs(opts)
2991 opts = pycompat.byteskwargs(opts)
2992 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
2992 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
2993
2993
2994
2994
2995 @command(
2995 @command(
2996 b'debugrename',
2996 b'debugrename',
2997 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2997 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2998 _(b'[-r REV] [FILE]...'),
2998 _(b'[-r REV] [FILE]...'),
2999 )
2999 )
3000 def debugrename(ui, repo, *pats, **opts):
3000 def debugrename(ui, repo, *pats, **opts):
3001 """dump rename information"""
3001 """dump rename information"""
3002
3002
3003 opts = pycompat.byteskwargs(opts)
3003 opts = pycompat.byteskwargs(opts)
3004 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3004 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3005 m = scmutil.match(ctx, pats, opts)
3005 m = scmutil.match(ctx, pats, opts)
3006 for abs in ctx.walk(m):
3006 for abs in ctx.walk(m):
3007 fctx = ctx[abs]
3007 fctx = ctx[abs]
3008 o = fctx.filelog().renamed(fctx.filenode())
3008 o = fctx.filelog().renamed(fctx.filenode())
3009 rel = repo.pathto(abs)
3009 rel = repo.pathto(abs)
3010 if o:
3010 if o:
3011 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3011 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3012 else:
3012 else:
3013 ui.write(_(b"%s not renamed\n") % rel)
3013 ui.write(_(b"%s not renamed\n") % rel)
3014
3014
3015
3015
3016 @command(b'debugrequires|debugrequirements', [], b'')
3016 @command(b'debugrequires|debugrequirements', [], b'')
3017 def debugrequirements(ui, repo):
3017 def debugrequirements(ui, repo):
3018 """print the current repo requirements"""
3018 """print the current repo requirements"""
3019 for r in sorted(repo.requirements):
3019 for r in sorted(repo.requirements):
3020 ui.write(b"%s\n" % r)
3020 ui.write(b"%s\n" % r)
3021
3021
3022
3022
3023 @command(
3023 @command(
3024 b'debugrevlog',
3024 b'debugrevlog',
3025 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3025 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3026 _(b'-c|-m|FILE'),
3026 _(b'-c|-m|FILE'),
3027 optionalrepo=True,
3027 optionalrepo=True,
3028 )
3028 )
3029 def debugrevlog(ui, repo, file_=None, **opts):
3029 def debugrevlog(ui, repo, file_=None, **opts):
3030 """show data and statistics about a revlog"""
3030 """show data and statistics about a revlog"""
3031 opts = pycompat.byteskwargs(opts)
3031 opts = pycompat.byteskwargs(opts)
3032 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3032 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3033
3033
3034 if opts.get(b"dump"):
3034 if opts.get(b"dump"):
3035 numrevs = len(r)
3035 numrevs = len(r)
3036 ui.write(
3036 ui.write(
3037 (
3037 (
3038 b"# rev p1rev p2rev start end deltastart base p1 p2"
3038 b"# rev p1rev p2rev start end deltastart base p1 p2"
3039 b" rawsize totalsize compression heads chainlen\n"
3039 b" rawsize totalsize compression heads chainlen\n"
3040 )
3040 )
3041 )
3041 )
3042 ts = 0
3042 ts = 0
3043 heads = set()
3043 heads = set()
3044
3044
3045 for rev in pycompat.xrange(numrevs):
3045 for rev in pycompat.xrange(numrevs):
3046 dbase = r.deltaparent(rev)
3046 dbase = r.deltaparent(rev)
3047 if dbase == -1:
3047 if dbase == -1:
3048 dbase = rev
3048 dbase = rev
3049 cbase = r.chainbase(rev)
3049 cbase = r.chainbase(rev)
3050 clen = r.chainlen(rev)
3050 clen = r.chainlen(rev)
3051 p1, p2 = r.parentrevs(rev)
3051 p1, p2 = r.parentrevs(rev)
3052 rs = r.rawsize(rev)
3052 rs = r.rawsize(rev)
3053 ts = ts + rs
3053 ts = ts + rs
3054 heads -= set(r.parentrevs(rev))
3054 heads -= set(r.parentrevs(rev))
3055 heads.add(rev)
3055 heads.add(rev)
3056 try:
3056 try:
3057 compression = ts / r.end(rev)
3057 compression = ts / r.end(rev)
3058 except ZeroDivisionError:
3058 except ZeroDivisionError:
3059 compression = 0
3059 compression = 0
3060 ui.write(
3060 ui.write(
3061 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3061 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3062 b"%11d %5d %8d\n"
3062 b"%11d %5d %8d\n"
3063 % (
3063 % (
3064 rev,
3064 rev,
3065 p1,
3065 p1,
3066 p2,
3066 p2,
3067 r.start(rev),
3067 r.start(rev),
3068 r.end(rev),
3068 r.end(rev),
3069 r.start(dbase),
3069 r.start(dbase),
3070 r.start(cbase),
3070 r.start(cbase),
3071 r.start(p1),
3071 r.start(p1),
3072 r.start(p2),
3072 r.start(p2),
3073 rs,
3073 rs,
3074 ts,
3074 ts,
3075 compression,
3075 compression,
3076 len(heads),
3076 len(heads),
3077 clen,
3077 clen,
3078 )
3078 )
3079 )
3079 )
3080 return 0
3080 return 0
3081
3081
3082 format = r._format_version
3082 format = r._format_version
3083 v = r._format_flags
3083 v = r._format_flags
3084 flags = []
3084 flags = []
3085 gdelta = False
3085 gdelta = False
3086 if v & revlog.FLAG_INLINE_DATA:
3086 if v & revlog.FLAG_INLINE_DATA:
3087 flags.append(b'inline')
3087 flags.append(b'inline')
3088 if v & revlog.FLAG_GENERALDELTA:
3088 if v & revlog.FLAG_GENERALDELTA:
3089 gdelta = True
3089 gdelta = True
3090 flags.append(b'generaldelta')
3090 flags.append(b'generaldelta')
3091 if not flags:
3091 if not flags:
3092 flags = [b'(none)']
3092 flags = [b'(none)']
3093
3093
3094 ### tracks merge vs single parent
3094 ### tracks merge vs single parent
3095 nummerges = 0
3095 nummerges = 0
3096
3096
3097 ### tracks ways the "delta" are build
3097 ### tracks ways the "delta" are build
3098 # nodelta
3098 # nodelta
3099 numempty = 0
3099 numempty = 0
3100 numemptytext = 0
3100 numemptytext = 0
3101 numemptydelta = 0
3101 numemptydelta = 0
3102 # full file content
3102 # full file content
3103 numfull = 0
3103 numfull = 0
3104 # intermediate snapshot against a prior snapshot
3104 # intermediate snapshot against a prior snapshot
3105 numsemi = 0
3105 numsemi = 0
3106 # snapshot count per depth
3106 # snapshot count per depth
3107 numsnapdepth = collections.defaultdict(lambda: 0)
3107 numsnapdepth = collections.defaultdict(lambda: 0)
3108 # delta against previous revision
3108 # delta against previous revision
3109 numprev = 0
3109 numprev = 0
3110 # delta against first or second parent (not prev)
3110 # delta against first or second parent (not prev)
3111 nump1 = 0
3111 nump1 = 0
3112 nump2 = 0
3112 nump2 = 0
3113 # delta against neither prev nor parents
3113 # delta against neither prev nor parents
3114 numother = 0
3114 numother = 0
3115 # delta against prev that are also first or second parent
3115 # delta against prev that are also first or second parent
3116 # (details of `numprev`)
3116 # (details of `numprev`)
3117 nump1prev = 0
3117 nump1prev = 0
3118 nump2prev = 0
3118 nump2prev = 0
3119
3119
3120 # data about delta chain of each revs
3120 # data about delta chain of each revs
3121 chainlengths = []
3121 chainlengths = []
3122 chainbases = []
3122 chainbases = []
3123 chainspans = []
3123 chainspans = []
3124
3124
3125 # data about each revision
3125 # data about each revision
3126 datasize = [None, 0, 0]
3126 datasize = [None, 0, 0]
3127 fullsize = [None, 0, 0]
3127 fullsize = [None, 0, 0]
3128 semisize = [None, 0, 0]
3128 semisize = [None, 0, 0]
3129 # snapshot count per depth
3129 # snapshot count per depth
3130 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3130 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3131 deltasize = [None, 0, 0]
3131 deltasize = [None, 0, 0]
3132 chunktypecounts = {}
3132 chunktypecounts = {}
3133 chunktypesizes = {}
3133 chunktypesizes = {}
3134
3134
3135 def addsize(size, l):
3135 def addsize(size, l):
3136 if l[0] is None or size < l[0]:
3136 if l[0] is None or size < l[0]:
3137 l[0] = size
3137 l[0] = size
3138 if size > l[1]:
3138 if size > l[1]:
3139 l[1] = size
3139 l[1] = size
3140 l[2] += size
3140 l[2] += size
3141
3141
3142 numrevs = len(r)
3142 numrevs = len(r)
3143 for rev in pycompat.xrange(numrevs):
3143 for rev in pycompat.xrange(numrevs):
3144 p1, p2 = r.parentrevs(rev)
3144 p1, p2 = r.parentrevs(rev)
3145 delta = r.deltaparent(rev)
3145 delta = r.deltaparent(rev)
3146 if format > 0:
3146 if format > 0:
3147 addsize(r.rawsize(rev), datasize)
3147 addsize(r.rawsize(rev), datasize)
3148 if p2 != nullrev:
3148 if p2 != nullrev:
3149 nummerges += 1
3149 nummerges += 1
3150 size = r.length(rev)
3150 size = r.length(rev)
3151 if delta == nullrev:
3151 if delta == nullrev:
3152 chainlengths.append(0)
3152 chainlengths.append(0)
3153 chainbases.append(r.start(rev))
3153 chainbases.append(r.start(rev))
3154 chainspans.append(size)
3154 chainspans.append(size)
3155 if size == 0:
3155 if size == 0:
3156 numempty += 1
3156 numempty += 1
3157 numemptytext += 1
3157 numemptytext += 1
3158 else:
3158 else:
3159 numfull += 1
3159 numfull += 1
3160 numsnapdepth[0] += 1
3160 numsnapdepth[0] += 1
3161 addsize(size, fullsize)
3161 addsize(size, fullsize)
3162 addsize(size, snapsizedepth[0])
3162 addsize(size, snapsizedepth[0])
3163 else:
3163 else:
3164 chainlengths.append(chainlengths[delta] + 1)
3164 chainlengths.append(chainlengths[delta] + 1)
3165 baseaddr = chainbases[delta]
3165 baseaddr = chainbases[delta]
3166 revaddr = r.start(rev)
3166 revaddr = r.start(rev)
3167 chainbases.append(baseaddr)
3167 chainbases.append(baseaddr)
3168 chainspans.append((revaddr - baseaddr) + size)
3168 chainspans.append((revaddr - baseaddr) + size)
3169 if size == 0:
3169 if size == 0:
3170 numempty += 1
3170 numempty += 1
3171 numemptydelta += 1
3171 numemptydelta += 1
3172 elif r.issnapshot(rev):
3172 elif r.issnapshot(rev):
3173 addsize(size, semisize)
3173 addsize(size, semisize)
3174 numsemi += 1
3174 numsemi += 1
3175 depth = r.snapshotdepth(rev)
3175 depth = r.snapshotdepth(rev)
3176 numsnapdepth[depth] += 1
3176 numsnapdepth[depth] += 1
3177 addsize(size, snapsizedepth[depth])
3177 addsize(size, snapsizedepth[depth])
3178 else:
3178 else:
3179 addsize(size, deltasize)
3179 addsize(size, deltasize)
3180 if delta == rev - 1:
3180 if delta == rev - 1:
3181 numprev += 1
3181 numprev += 1
3182 if delta == p1:
3182 if delta == p1:
3183 nump1prev += 1
3183 nump1prev += 1
3184 elif delta == p2:
3184 elif delta == p2:
3185 nump2prev += 1
3185 nump2prev += 1
3186 elif delta == p1:
3186 elif delta == p1:
3187 nump1 += 1
3187 nump1 += 1
3188 elif delta == p2:
3188 elif delta == p2:
3189 nump2 += 1
3189 nump2 += 1
3190 elif delta != nullrev:
3190 elif delta != nullrev:
3191 numother += 1
3191 numother += 1
3192
3192
3193 # Obtain data on the raw chunks in the revlog.
3193 # Obtain data on the raw chunks in the revlog.
3194 if util.safehasattr(r, b'_getsegmentforrevs'):
3194 if util.safehasattr(r, b'_getsegmentforrevs'):
3195 segment = r._getsegmentforrevs(rev, rev)[1]
3195 segment = r._getsegmentforrevs(rev, rev)[1]
3196 else:
3196 else:
3197 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3197 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3198 if segment:
3198 if segment:
3199 chunktype = bytes(segment[0:1])
3199 chunktype = bytes(segment[0:1])
3200 else:
3200 else:
3201 chunktype = b'empty'
3201 chunktype = b'empty'
3202
3202
3203 if chunktype not in chunktypecounts:
3203 if chunktype not in chunktypecounts:
3204 chunktypecounts[chunktype] = 0
3204 chunktypecounts[chunktype] = 0
3205 chunktypesizes[chunktype] = 0
3205 chunktypesizes[chunktype] = 0
3206
3206
3207 chunktypecounts[chunktype] += 1
3207 chunktypecounts[chunktype] += 1
3208 chunktypesizes[chunktype] += size
3208 chunktypesizes[chunktype] += size
3209
3209
3210 # Adjust size min value for empty cases
3210 # Adjust size min value for empty cases
3211 for size in (datasize, fullsize, semisize, deltasize):
3211 for size in (datasize, fullsize, semisize, deltasize):
3212 if size[0] is None:
3212 if size[0] is None:
3213 size[0] = 0
3213 size[0] = 0
3214
3214
3215 numdeltas = numrevs - numfull - numempty - numsemi
3215 numdeltas = numrevs - numfull - numempty - numsemi
3216 numoprev = numprev - nump1prev - nump2prev
3216 numoprev = numprev - nump1prev - nump2prev
3217 totalrawsize = datasize[2]
3217 totalrawsize = datasize[2]
3218 datasize[2] /= numrevs
3218 datasize[2] /= numrevs
3219 fulltotal = fullsize[2]
3219 fulltotal = fullsize[2]
3220 if numfull == 0:
3220 if numfull == 0:
3221 fullsize[2] = 0
3221 fullsize[2] = 0
3222 else:
3222 else:
3223 fullsize[2] /= numfull
3223 fullsize[2] /= numfull
3224 semitotal = semisize[2]
3224 semitotal = semisize[2]
3225 snaptotal = {}
3225 snaptotal = {}
3226 if numsemi > 0:
3226 if numsemi > 0:
3227 semisize[2] /= numsemi
3227 semisize[2] /= numsemi
3228 for depth in snapsizedepth:
3228 for depth in snapsizedepth:
3229 snaptotal[depth] = snapsizedepth[depth][2]
3229 snaptotal[depth] = snapsizedepth[depth][2]
3230 snapsizedepth[depth][2] /= numsnapdepth[depth]
3230 snapsizedepth[depth][2] /= numsnapdepth[depth]
3231
3231
3232 deltatotal = deltasize[2]
3232 deltatotal = deltasize[2]
3233 if numdeltas > 0:
3233 if numdeltas > 0:
3234 deltasize[2] /= numdeltas
3234 deltasize[2] /= numdeltas
3235 totalsize = fulltotal + semitotal + deltatotal
3235 totalsize = fulltotal + semitotal + deltatotal
3236 avgchainlen = sum(chainlengths) / numrevs
3236 avgchainlen = sum(chainlengths) / numrevs
3237 maxchainlen = max(chainlengths)
3237 maxchainlen = max(chainlengths)
3238 maxchainspan = max(chainspans)
3238 maxchainspan = max(chainspans)
3239 compratio = 1
3239 compratio = 1
3240 if totalsize:
3240 if totalsize:
3241 compratio = totalrawsize / totalsize
3241 compratio = totalrawsize / totalsize
3242
3242
3243 basedfmtstr = b'%%%dd\n'
3243 basedfmtstr = b'%%%dd\n'
3244 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3244 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3245
3245
3246 def dfmtstr(max):
3246 def dfmtstr(max):
3247 return basedfmtstr % len(str(max))
3247 return basedfmtstr % len(str(max))
3248
3248
3249 def pcfmtstr(max, padding=0):
3249 def pcfmtstr(max, padding=0):
3250 return basepcfmtstr % (len(str(max)), b' ' * padding)
3250 return basepcfmtstr % (len(str(max)), b' ' * padding)
3251
3251
3252 def pcfmt(value, total):
3252 def pcfmt(value, total):
3253 if total:
3253 if total:
3254 return (value, 100 * float(value) / total)
3254 return (value, 100 * float(value) / total)
3255 else:
3255 else:
3256 return value, 100.0
3256 return value, 100.0
3257
3257
3258 ui.writenoi18n(b'format : %d\n' % format)
3258 ui.writenoi18n(b'format : %d\n' % format)
3259 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3259 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3260
3260
3261 ui.write(b'\n')
3261 ui.write(b'\n')
3262 fmt = pcfmtstr(totalsize)
3262 fmt = pcfmtstr(totalsize)
3263 fmt2 = dfmtstr(totalsize)
3263 fmt2 = dfmtstr(totalsize)
3264 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3264 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3265 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3265 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3266 ui.writenoi18n(
3266 ui.writenoi18n(
3267 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3267 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3268 )
3268 )
3269 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3269 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3270 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3270 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3271 ui.writenoi18n(
3271 ui.writenoi18n(
3272 b' text : '
3272 b' text : '
3273 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3273 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3274 )
3274 )
3275 ui.writenoi18n(
3275 ui.writenoi18n(
3276 b' delta : '
3276 b' delta : '
3277 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3277 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3278 )
3278 )
3279 ui.writenoi18n(
3279 ui.writenoi18n(
3280 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3280 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3281 )
3281 )
3282 for depth in sorted(numsnapdepth):
3282 for depth in sorted(numsnapdepth):
3283 ui.write(
3283 ui.write(
3284 (b' lvl-%-3d : ' % depth)
3284 (b' lvl-%-3d : ' % depth)
3285 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3285 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3286 )
3286 )
3287 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3287 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3288 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3288 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3289 ui.writenoi18n(
3289 ui.writenoi18n(
3290 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3290 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3291 )
3291 )
3292 for depth in sorted(numsnapdepth):
3292 for depth in sorted(numsnapdepth):
3293 ui.write(
3293 ui.write(
3294 (b' lvl-%-3d : ' % depth)
3294 (b' lvl-%-3d : ' % depth)
3295 + fmt % pcfmt(snaptotal[depth], totalsize)
3295 + fmt % pcfmt(snaptotal[depth], totalsize)
3296 )
3296 )
3297 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3297 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3298
3298
3299 def fmtchunktype(chunktype):
3299 def fmtchunktype(chunktype):
3300 if chunktype == b'empty':
3300 if chunktype == b'empty':
3301 return b' %s : ' % chunktype
3301 return b' %s : ' % chunktype
3302 elif chunktype in pycompat.bytestr(string.ascii_letters):
3302 elif chunktype in pycompat.bytestr(string.ascii_letters):
3303 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3303 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3304 else:
3304 else:
3305 return b' 0x%s : ' % hex(chunktype)
3305 return b' 0x%s : ' % hex(chunktype)
3306
3306
3307 ui.write(b'\n')
3307 ui.write(b'\n')
3308 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3308 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3309 for chunktype in sorted(chunktypecounts):
3309 for chunktype in sorted(chunktypecounts):
3310 ui.write(fmtchunktype(chunktype))
3310 ui.write(fmtchunktype(chunktype))
3311 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3311 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3312 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3312 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3313 for chunktype in sorted(chunktypecounts):
3313 for chunktype in sorted(chunktypecounts):
3314 ui.write(fmtchunktype(chunktype))
3314 ui.write(fmtchunktype(chunktype))
3315 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3315 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3316
3316
3317 ui.write(b'\n')
3317 ui.write(b'\n')
3318 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3318 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3319 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3319 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3320 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3320 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3321 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3321 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3322 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3322 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3323
3323
3324 if format > 0:
3324 if format > 0:
3325 ui.write(b'\n')
3325 ui.write(b'\n')
3326 ui.writenoi18n(
3326 ui.writenoi18n(
3327 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3327 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3328 % tuple(datasize)
3328 % tuple(datasize)
3329 )
3329 )
3330 ui.writenoi18n(
3330 ui.writenoi18n(
3331 b'full revision size (min/max/avg) : %d / %d / %d\n'
3331 b'full revision size (min/max/avg) : %d / %d / %d\n'
3332 % tuple(fullsize)
3332 % tuple(fullsize)
3333 )
3333 )
3334 ui.writenoi18n(
3334 ui.writenoi18n(
3335 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3335 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3336 % tuple(semisize)
3336 % tuple(semisize)
3337 )
3337 )
3338 for depth in sorted(snapsizedepth):
3338 for depth in sorted(snapsizedepth):
3339 if depth == 0:
3339 if depth == 0:
3340 continue
3340 continue
3341 ui.writenoi18n(
3341 ui.writenoi18n(
3342 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3342 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3343 % ((depth,) + tuple(snapsizedepth[depth]))
3343 % ((depth,) + tuple(snapsizedepth[depth]))
3344 )
3344 )
3345 ui.writenoi18n(
3345 ui.writenoi18n(
3346 b'delta size (min/max/avg) : %d / %d / %d\n'
3346 b'delta size (min/max/avg) : %d / %d / %d\n'
3347 % tuple(deltasize)
3347 % tuple(deltasize)
3348 )
3348 )
3349
3349
3350 if numdeltas > 0:
3350 if numdeltas > 0:
3351 ui.write(b'\n')
3351 ui.write(b'\n')
3352 fmt = pcfmtstr(numdeltas)
3352 fmt = pcfmtstr(numdeltas)
3353 fmt2 = pcfmtstr(numdeltas, 4)
3353 fmt2 = pcfmtstr(numdeltas, 4)
3354 ui.writenoi18n(
3354 ui.writenoi18n(
3355 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3355 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3356 )
3356 )
3357 if numprev > 0:
3357 if numprev > 0:
3358 ui.writenoi18n(
3358 ui.writenoi18n(
3359 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3359 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3360 )
3360 )
3361 ui.writenoi18n(
3361 ui.writenoi18n(
3362 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3362 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3363 )
3363 )
3364 ui.writenoi18n(
3364 ui.writenoi18n(
3365 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3365 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3366 )
3366 )
3367 if gdelta:
3367 if gdelta:
3368 ui.writenoi18n(
3368 ui.writenoi18n(
3369 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3369 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3370 )
3370 )
3371 ui.writenoi18n(
3371 ui.writenoi18n(
3372 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3372 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3373 )
3373 )
3374 ui.writenoi18n(
3374 ui.writenoi18n(
3375 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3375 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3376 )
3376 )
3377
3377
3378
3378
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    One line is printed per revision. Format 0 (the default) shows
    offset/length/linkrev plus node and parent ids; format 1 adds the
    revision flags and the uncompressed size. --verbose adds extra
    columns and --debug prints full-length node ids.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Only the two historical index layouts are supported.
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints the full hex nodeid; otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Column width of a rendered nodeid, taken from the first revision.
        idlen = len(shortfn(r.node(i)))
        break

    # NOTE(review): the header literals below look like their column padding
    # was collapsed in this copy of the file — confirm alignment widths
    # against upstream before relying on the printed layout.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the entry cannot be resolved.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 works from parent revision numbers, not nodes.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3492
3492
3493
3493
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered compilation pipeline: each stage transforms the tree produced
    # by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final (optimize) stage entirely.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stages to print: always, or only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree so that
    # --verify-optimized can replay specific stages below.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed (unoptimized) and optimized trees and
        # diff the resulting revision sequences.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-diff-style comparison of the two rev lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        # Non-zero exit signals that optimization changed the result.
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3625
3625
3626
3626
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Guard clauses: only the SSH-over-stdio mode exists today, and the two
    # logging destinations are mutually exclusive.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    iolog = None
    fd_opt = opts[b'logiofd']
    path_opt = opts[b'logiofile']

    if fd_opt:
        # Ideally this would be line buffered, but line buffering in binary
        # mode isn't supported and warns on Python 3.8+. Unbuffered writes
        # could cost a little performance, but this is not hot code.
        fd = int(fd_opt)
        try:
            iolog = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # A pipe can't seek, so append mode fails on py3; fall back to
            # plain write mode, which is equivalent for a pipe.
            iolog = os.fdopen(fd, 'wb', 0)
    elif path_opt:
        iolog = open(path_opt, b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=iolog)
    server.serve_forever()
3675
3675
3676
3676
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes; rev2 defaults to the null revision,
    # which clears p2 (i.e. leaves the working directory in a non-merge state).
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Hold the working-copy lock while rewriting the dirstate parents.
    with repo.wlock():
        repo.setparents(node1, node2)
3704
3704
3705
3705
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is selected by the flag, so the sole
    # positional argument is actually the revision, not a file path.
    storage_selected = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if storage_selected:
        if rev is not None:
            # NOTE(review): the command name reported here is b'debugdata',
            # which looks copied from that command — confirm before changing
            # the user-visible string.
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Unwrap to the underlying revlog when the storage object is a wrapper.
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if not sidedata:
        return
    entries = sorted(sidedata.items())
    ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
    for key, value in entries:
        ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
        if ui.verbose:
            ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3732
3732
3733
3733
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known port can be probed; anything else is
    # rejected up front.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12.
    # Build an equivalent explicit context: negotiate whatever TLS version
    # both sides support and skip certificate verification, since all we
    # need is the raw peer certificate for the win32 chain check below.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # Fetch the peer certificate in DER (binary) form.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First check without building; only contact Windows Update when
        # the chain is actually incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3805
3805
3806
3806
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect bundle files from .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from chlist, honoring the log
        # options --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Suppress the incoming machinery's own output while probing the
        # bundle for its changesets.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3947
3947
3948
3948
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """show the subrepository state recorded for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # ctx.substate maps subrepo path -> (source, revision, kind); print the
    # first two fields, aligned, for each subrepo in sorted order.
    for k, v in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % k)
        ui.writenoi18n(b' source   %s\n' % v[0])
        ui.writenoi18n(b' revision %s\n' % v[1])
3960
3960
3961
3961
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    imported_objects = {
        'ui': ui,
        'repo': repo,
    }

    code.interact(local=imported_objects)
3977
3977
3978
3978
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4033
4033
4034
4034
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what is already cached, never
        # compute new entries as a side effect of this debug command.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            tagsnodedisplay = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        elif tagsnode is None:
            tagsnodedisplay = b'missing'
        else:
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4053
4053
4054
4054
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Build the custom keyword properties from -D KEY=VALUE definitions.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4118
4118
4119
4119
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # getpass() may return None (e.g. no interactive input available).
    if r is None:
        r = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % r)
4134
4134
4135
4135
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % r)
4148
4148
4149
4149
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both locks so every cache (including working-copy-related ones)
    # can be rebuilt safely.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4155
4155
4156
4156
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4206
4206
4207
4207
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally normalize OS path separators to '/' for display.
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    # Column widths are sized to the longest absolute and relative paths.
    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
        max([len(abs) for abs in items]),
        max([len(repo.pathto(abs)) for abs in items]),
    )
    for abs in items:
        line = fmt % (
            abs,
            f(repo.pathto(abs)),
            m.exact(abs) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4234
4234
4235
4235
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        if entry.get(b'divergentnodes'):
            # Render each divergent node as "<hex> (<phase>)", space-joined,
            # with a trailing space separating it from the reason.
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in entry[b'divergentnodes']
                )
                + b' '
            )
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4253
4253
4254
4254
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """check argument passing over the wire protocol

    Calls the remote ``debugwireargs`` command twice with the given
    positional values and any non-empty option values, and prints the
    first result (warning if the second differs).
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the test options remain.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = {}
        for k, v in opts.items():
            if v:
                args[k] = v
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4285
4285
4286
4286
4287 def _parsewirelangblocks(fh):
4287 def _parsewirelangblocks(fh):
4288 activeaction = None
4288 activeaction = None
4289 blocklines = []
4289 blocklines = []
4290 lastindent = 0
4290 lastindent = 0
4291
4291
4292 for line in fh:
4292 for line in fh:
4293 line = line.rstrip()
4293 line = line.rstrip()
4294 if not line:
4294 if not line:
4295 continue
4295 continue
4296
4296
4297 if line.startswith(b'#'):
4297 if line.startswith(b'#'):
4298 continue
4298 continue
4299
4299
4300 if not line.startswith(b' '):
4300 if not line.startswith(b' '):
4301 # New block. Flush previous one.
4301 # New block. Flush previous one.
4302 if activeaction:
4302 if activeaction:
4303 yield activeaction, blocklines
4303 yield activeaction, blocklines
4304
4304
4305 activeaction = line
4305 activeaction = line
4306 blocklines = []
4306 blocklines = []
4307 lastindent = 0
4307 lastindent = 0
4308 continue
4308 continue
4309
4309
4310 # Else we start with an indent.
4310 # Else we start with an indent.
4311
4311
4312 if not activeaction:
4312 if not activeaction:
4313 raise error.Abort(_(b'indented line outside of block'))
4313 raise error.Abort(_(b'indented line outside of block'))
4314
4314
4315 indent = len(line) - len(line.lstrip())
4315 indent = len(line) - len(line.lstrip())
4316
4316
4317 # If this line is indented more than the last line, concatenate it.
4317 # If this line is indented more than the last line, concatenate it.
4318 if indent > lastindent and blocklines:
4318 if indent > lastindent and blocklines:
4319 blocklines[-1] += line.lstrip()
4319 blocklines[-1] += line.lstrip()
4320 else:
4320 else:
4321 blocklines.append(line)
4321 blocklines.append(line)
4322 lastindent = indent
4322 lastindent = indent
4323
4323
4324 # Flush last block.
4324 # Flush last block.
4325 if activeaction:
4325 if activeaction:
4326 yield activeaction, blocklines
4326 yield activeaction, blocklines
4327
4327
4328
4328
4329 @command(
4329 @command(
4330 b'debugwireproto',
4330 b'debugwireproto',
4331 [
4331 [
4332 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4332 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4333 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4333 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4334 (
4334 (
4335 b'',
4335 b'',
4336 b'noreadstderr',
4336 b'noreadstderr',
4337 False,
4337 False,
4338 _(b'do not read from stderr of the remote'),
4338 _(b'do not read from stderr of the remote'),
4339 ),
4339 ),
4340 (
4340 (
4341 b'',
4341 b'',
4342 b'nologhandshake',
4342 b'nologhandshake',
4343 False,
4343 False,
4344 _(b'do not log I/O related to the peer handshake'),
4344 _(b'do not log I/O related to the peer handshake'),
4345 ),
4345 ),
4346 ]
4346 ]
4347 + cmdutil.remoteopts,
4347 + cmdutil.remoteopts,
4348 _(b'[PATH]'),
4348 _(b'[PATH]'),
4349 optionalrepo=True,
4349 optionalrepo=True,
4350 )
4350 )
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``ssh1``. ``raw`` instances only allow sending raw data payloads and
    don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'ssh1',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw" and "ssh1"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = urlutil.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                ui.status(
                    _(b'response: %s\n')
                    % stringutil.pprint(res, bprefix=True, indent=2)
                )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        # fixed: the quoted example was missing its closing
                        # double quote
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # bytes.split() returns a list; open the path element,
                    # not the list itself (the original passed the whole
                    # list to open(), raising TypeError for any BODYFILE).
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now