##// END OF EJS Templates
debuglock: ignore ENOENT error when unlocking...
marmoute -
r49924:020378f3 stable
parent child Browse files
Show More
@@ -1,4884 +1,4884 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import codecs
11 import codecs
12 import collections
12 import collections
13 import contextlib
13 import contextlib
14 import difflib
14 import difflib
15 import errno
15 import errno
16 import glob
16 import glob
17 import operator
17 import operator
18 import os
18 import os
19 import platform
19 import platform
20 import random
20 import random
21 import re
21 import re
22 import socket
22 import socket
23 import ssl
23 import ssl
24 import stat
24 import stat
25 import string
25 import string
26 import subprocess
26 import subprocess
27 import sys
27 import sys
28 import time
28 import time
29
29
30 from .i18n import _
30 from .i18n import _
31 from .node import (
31 from .node import (
32 bin,
32 bin,
33 hex,
33 hex,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 deltas as deltautil,
106 deltas as deltautil,
107 nodemap,
107 nodemap,
108 rewrite,
108 rewrite,
109 sidedata,
109 sidedata,
110 )
110 )
111
111
112 release = lockmod.release
112 release = lockmod.release
113
113
114 table = {}
114 table = {}
115 table.update(strip.command._table)
115 table.update(strip.command._table)
116 command = registrar.command(table)
116 command = registrar.command(table)
117
117
118
118
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # Explicit index file given: open it directly, no repo required.
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(anc), hex(anc)))
138
138
139
139
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # This is a base85-armored version of the EICAR test file. See
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    payload = util.b85decode(
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(payload)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
155
155
156
156
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file

    Reads the bundle at ``fname`` and applies it to the current repository.
    """
    # Use a context manager so the bundle file is always closed; the
    # original leaked the handle (sibling debugbundle already uses `with`).
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
163
163
164
164
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to clobber existing history unless explicitly allowed.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first pass over the parsed text,
    # used only for progress reporting and the mergeable-file sizing)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Build everything inside one transaction so a failure rolls back cleanly.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently committed node (-1 = none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # nodeids[i] is the node committed for DAG id i
        id = 0
        progress.update(id)
        # Second pass: actually create a commit per 'n' element.
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the shared file so the
                        # result exercises real merge machinery.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Touch one distinct line per rev to keep changes mergeable.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # Single file fully rewritten at every rev.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # Fresh file per rev; merges also carry over p2's nf* files.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve content captured above, or None
                    # to mark the path as removed.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve backrefs to concrete parent nodeids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # ':tag' element: record a local tag for the given id.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # '@branch' element: switch branch for subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
350
350
351
351
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Pretty-print the contents of a changegroup unbundler ``gen``.

    With ``all`` set, every delta of every section (changelog, manifest,
    filelogs) is listed with its parents, cset and delta base; otherwise
    only the changelog node hashes are printed.  ``indent`` prefixes each
    output line (used when nested inside bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Dump every delta of the current section; note this advances
            # the underlying stream, so sections must be read in order.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # Filelog sections repeat until an empty header chunk is seen.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # Terse mode: only the changelog node ids.
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
391
391
392
392
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown format: report the raw version byte and size, nothing else.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
        return
    ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        mark = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(prefix)
        cmdutil.showmarker(fm, mark)
    fm.end()
415
415
416
416
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
425
425
426
426
def _quasirepr(thing):
    """repr-like byte string with deterministic (sorted) mapping order"""
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, mapping_types):
        pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
433
433
434
434
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        # Optional filter: only show the requested part types.
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # Unconditionally construct the unbundler (consumes the part
            # header) even in quiet mode, matching historical behavior.
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
457
457
458
458
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # Only report the bundlespec; do not decode the payload.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
481
481
482
482
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % key)
                for v in values:
                    ui.write(b' %s\n' % v)
    finally:
        # Always tear down the peer connection, even on error.
        peer.close()
502
502
503
503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    files = None
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the pre-computed files information from changelog sidedata.
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        # No sidedata block available and no computation requested.
        return

    # Categories are checked in priority order; first match wins.
    categories = (
        (files.added, b"added"),
        (files.removed, b"removed"),
        (files.merged, b"merged"),
        (files.salvaged, b"salvaged"),
    )
    for f in sorted(files.touched):
        action = b"touched"
        for members, label in categories:
            if f in members:
                action = label
                break

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        template = b"%-8s %2s: %s, %s;\n"
        ui.write(template % (action, copy_parent, f, copy_source))
553
553
554
554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    # Report every inconsistency before aborting, so the user sees them all.
    errors = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errors += 1
    if errors:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
568
568
569
569
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # Dispatch to the style listing or the color listing.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
582
582
583
583
def _debugdisplaycolor(ui):
    """print each available color/effect name rendered in itself"""
    # Work on a copy so the caller's ui styles are untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))

    def _sortkey(item):
        # sort label with a '_' after the other to group '_background' entry.
        name, label = item
        return (b'_' in name, name, label)

    for colorname, label in sorted(ui._styles.items(), key=_sortkey):
        ui.write(b'%s\n' % colorname, label=label)
600
600
601
601
def _debugdisplaystyle(ui):
    """print each configured style label and the effects it expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists line up in a column.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            padding = b' ' * (max(0, width - len(label)))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(b': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write(b'\n')
615
615
616
616
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    # Stream bundles copy revlog files wholesale, so secret changesets
    # cannot be filtered out; warn rather than silently leaking them.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # Generate the v1 stream bundle and write its chunks to *fname*.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    # Report which repository requirements a consumer must support.
    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
638
638
639
639
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit revlog file given: emit that revlog's DAG.  The vfs is
        # unaudited because the path is user-supplied and may be anywhere.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) for each node; listed revs also
            # get a label event so they appear as rN in the output.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No file: walk the changelog of the current repository.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to its list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Track the current branch so an annotation event ('a') is
            # emitted only when the branch changes between revisions.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Render the event stream as dagtext, honoring the formatting flags.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
709
709
710
710
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c, -m or --dir the storage is implied, so the sole positional
    # argument is the revision rather than a file path.
    storage_implied = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_implied:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        # Plain FILE form requires an explicit revision.
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
726
726
727
727
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With --extended, also try the extra date formats dateutil knows about.
    extra_formats = dateutil.extendeddateformats if opts["extended"] else None
    if extra_formats is None:
        parsed = dateutil.parsedate(date)
    else:
        parsed = dateutil.parsedate(date, extra_formats)
    # parsed is a (timestamp, tz-offset) pair.
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
746
746
747
747
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    # Bind frequently-used revlog methods to locals for the per-rev loop.
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        """Return (compsize, uncompsize, deltatype, chain, chainsize) for rev.

        Index entry layout used here: e[1] = compressed size, e[2] =
        uncompressed size, e[3] = delta base rev, e[5]/e[6] = parent revs.
        """
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the base can be any revision; classify it.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                # A revision that is its own base stores a full snapshot.
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, deltas are always against the
            # previous revision (or the rev is a full snapshot).
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # Sum the compressed sizes of every revision in the delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Plain-output column headers (templated output uses the keywords).
    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Assign a small integer id to each distinct chain base, in the order
    # bases are first seen.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Bytes spanned on disk from the chain base to the end of this rev.
        lineardist = revstart + comp - basestart
        # Bytes in that span that do NOT belong to this delta chain.
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length 1: this rev is its own base.
            prevrev = -1

        # Guard both ratios against division by zero (empty revisions /
        # empty chains).
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure how much data
            # would actually be read, and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            # Fraction of the bytes read that belong to the chain itself.
            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
928
928
929
929
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --no-dates; either disables
    # mtime display.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort primarily by saved mtime, tie-breaking on filename.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 marks an unset mtime; pad to align with the strftime form.
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # Symlink bit set: show 'lnk' instead of a permission triplet.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    # Finally, list recorded copy sources.
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
984
984
985
985
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 dockets record the ignore-pattern hash; with
    # dirstate-v1 there is nothing to print, so fall through silently.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is stored at the tail of the docket's tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1000
1000
1001
1001
1002 @command(
1002 @command(
1003 b'debugdiscovery',
1003 b'debugdiscovery',
1004 [
1004 [
1005 (b'', b'old', None, _(b'use old-style discovery')),
1005 (b'', b'old', None, _(b'use old-style discovery')),
1006 (
1006 (
1007 b'',
1007 b'',
1008 b'nonheads',
1008 b'nonheads',
1009 None,
1009 None,
1010 _(b'use old-style discovery with non-heads included'),
1010 _(b'use old-style discovery with non-heads included'),
1011 ),
1011 ),
1012 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1012 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1013 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1013 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1014 (
1014 (
1015 b'',
1015 b'',
1016 b'local-as-revs',
1016 b'local-as-revs',
1017 b"",
1017 b"",
1018 b'treat local has having these revisions only',
1018 b'treat local has having these revisions only',
1019 ),
1019 ),
1020 (
1020 (
1021 b'',
1021 b'',
1022 b'remote-as-revs',
1022 b'remote-as-revs',
1023 b"",
1023 b"",
1024 b'use local as remote, with only these these revisions',
1024 b'use local as remote, with only these these revisions',
1025 ),
1025 ),
1026 ]
1026 ]
1027 + cmdutil.remoteopts
1027 + cmdutil.remoteopts
1028 + cmdutil.formatteropts,
1028 + cmdutil.formatteropts,
1029 _(b'[--rev REV] [OTHER]'),
1029 _(b'[--rev REV] [OTHER]'),
1030 )
1030 )
1031 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1031 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1032 """runs the changeset discovery protocol in isolation
1032 """runs the changeset discovery protocol in isolation
1033
1033
1034 The local peer can be "replaced" by a subset of the local repository by
1034 The local peer can be "replaced" by a subset of the local repository by
1035 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1035 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1036 be "replaced" by a subset of the local repository using the
1036 be "replaced" by a subset of the local repository using the
1037 `--local-as-revs` flag. This is useful to efficiently debug pathological
1037 `--local-as-revs` flag. This is useful to efficiently debug pathological
1038 discovery situation.
1038 discovery situation.
1039
1039
1040 The following developer oriented config are relevant for people playing with this command:
1040 The following developer oriented config are relevant for people playing with this command:
1041
1041
1042 * devel.discovery.exchange-heads=True
1042 * devel.discovery.exchange-heads=True
1043
1043
1044 If False, the discovery will not start with
1044 If False, the discovery will not start with
1045 remote head fetching and local head querying.
1045 remote head fetching and local head querying.
1046
1046
1047 * devel.discovery.grow-sample=True
1047 * devel.discovery.grow-sample=True
1048
1048
1049 If False, the sample size used in set discovery will not be increased
1049 If False, the sample size used in set discovery will not be increased
1050 through the process
1050 through the process
1051
1051
1052 * devel.discovery.grow-sample.dynamic=True
1052 * devel.discovery.grow-sample.dynamic=True
1053
1053
1054 When discovery.grow-sample.dynamic is True, the default, the sample size is
1054 When discovery.grow-sample.dynamic is True, the default, the sample size is
1055 adapted to the shape of the undecided set (it is set to the max of:
1055 adapted to the shape of the undecided set (it is set to the max of:
1056 <target-size>, len(roots(undecided)), len(heads(undecided)
1056 <target-size>, len(roots(undecided)), len(heads(undecided)
1057
1057
1058 * devel.discovery.grow-sample.rate=1.05
1058 * devel.discovery.grow-sample.rate=1.05
1059
1059
1060 the rate at which the sample grow
1060 the rate at which the sample grow
1061
1061
1062 * devel.discovery.randomize=True
1062 * devel.discovery.randomize=True
1063
1063
1064 If andom sampling during discovery are deterministic. It is meant for
1064 If andom sampling during discovery are deterministic. It is meant for
1065 integration tests.
1065 integration tests.
1066
1066
1067 * devel.discovery.sample-size=200
1067 * devel.discovery.sample-size=200
1068
1068
1069 Control the initial size of the discovery sample
1069 Control the initial size of the discovery sample
1070
1070
1071 * devel.discovery.sample-size.initial=100
1071 * devel.discovery.sample-size.initial=100
1072
1072
1073 Control the initial size of the discovery for initial change
1073 Control the initial size of the discovery for initial change
1074 """
1074 """
1075 opts = pycompat.byteskwargs(opts)
1075 opts = pycompat.byteskwargs(opts)
1076 unfi = repo.unfiltered()
1076 unfi = repo.unfiltered()
1077
1077
1078 # setup potential extra filtering
1078 # setup potential extra filtering
1079 local_revs = opts[b"local_as_revs"]
1079 local_revs = opts[b"local_as_revs"]
1080 remote_revs = opts[b"remote_as_revs"]
1080 remote_revs = opts[b"remote_as_revs"]
1081
1081
1082 # make sure tests are repeatable
1082 # make sure tests are repeatable
1083 random.seed(int(opts[b'seed']))
1083 random.seed(int(opts[b'seed']))
1084
1084
1085 if not remote_revs:
1085 if not remote_revs:
1086
1086
1087 remoteurl, branches = urlutil.get_unique_pull_path(
1087 remoteurl, branches = urlutil.get_unique_pull_path(
1088 b'debugdiscovery', repo, ui, remoteurl
1088 b'debugdiscovery', repo, ui, remoteurl
1089 )
1089 )
1090 remote = hg.peer(repo, opts, remoteurl)
1090 remote = hg.peer(repo, opts, remoteurl)
1091 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1091 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1092 else:
1092 else:
1093 branches = (None, [])
1093 branches = (None, [])
1094 remote_filtered_revs = logcmdutil.revrange(
1094 remote_filtered_revs = logcmdutil.revrange(
1095 unfi, [b"not (::(%s))" % remote_revs]
1095 unfi, [b"not (::(%s))" % remote_revs]
1096 )
1096 )
1097 remote_filtered_revs = frozenset(remote_filtered_revs)
1097 remote_filtered_revs = frozenset(remote_filtered_revs)
1098
1098
1099 def remote_func(x):
1099 def remote_func(x):
1100 return remote_filtered_revs
1100 return remote_filtered_revs
1101
1101
1102 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1102 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1103
1103
1104 remote = repo.peer()
1104 remote = repo.peer()
1105 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1105 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1106
1106
1107 if local_revs:
1107 if local_revs:
1108 local_filtered_revs = logcmdutil.revrange(
1108 local_filtered_revs = logcmdutil.revrange(
1109 unfi, [b"not (::(%s))" % local_revs]
1109 unfi, [b"not (::(%s))" % local_revs]
1110 )
1110 )
1111 local_filtered_revs = frozenset(local_filtered_revs)
1111 local_filtered_revs = frozenset(local_filtered_revs)
1112
1112
1113 def local_func(x):
1113 def local_func(x):
1114 return local_filtered_revs
1114 return local_filtered_revs
1115
1115
1116 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1116 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1117 repo = repo.filtered(b'debug-discovery-local-filter')
1117 repo = repo.filtered(b'debug-discovery-local-filter')
1118
1118
1119 data = {}
1119 data = {}
1120 if opts.get(b'old'):
1120 if opts.get(b'old'):
1121
1121
1122 def doit(pushedrevs, remoteheads, remote=remote):
1122 def doit(pushedrevs, remoteheads, remote=remote):
1123 if not util.safehasattr(remote, b'branches'):
1123 if not util.safehasattr(remote, b'branches'):
1124 # enable in-client legacy support
1124 # enable in-client legacy support
1125 remote = localrepo.locallegacypeer(remote.local())
1125 remote = localrepo.locallegacypeer(remote.local())
1126 common, _in, hds = treediscovery.findcommonincoming(
1126 common, _in, hds = treediscovery.findcommonincoming(
1127 repo, remote, force=True, audit=data
1127 repo, remote, force=True, audit=data
1128 )
1128 )
1129 common = set(common)
1129 common = set(common)
1130 if not opts.get(b'nonheads'):
1130 if not opts.get(b'nonheads'):
1131 ui.writenoi18n(
1131 ui.writenoi18n(
1132 b"unpruned common: %s\n"
1132 b"unpruned common: %s\n"
1133 % b" ".join(sorted(short(n) for n in common))
1133 % b" ".join(sorted(short(n) for n in common))
1134 )
1134 )
1135
1135
1136 clnode = repo.changelog.node
1136 clnode = repo.changelog.node
1137 common = repo.revs(b'heads(::%ln)', common)
1137 common = repo.revs(b'heads(::%ln)', common)
1138 common = {clnode(r) for r in common}
1138 common = {clnode(r) for r in common}
1139 return common, hds
1139 return common, hds
1140
1140
1141 else:
1141 else:
1142
1142
1143 def doit(pushedrevs, remoteheads, remote=remote):
1143 def doit(pushedrevs, remoteheads, remote=remote):
1144 nodes = None
1144 nodes = None
1145 if pushedrevs:
1145 if pushedrevs:
1146 revs = logcmdutil.revrange(repo, pushedrevs)
1146 revs = logcmdutil.revrange(repo, pushedrevs)
1147 nodes = [repo[r].node() for r in revs]
1147 nodes = [repo[r].node() for r in revs]
1148 common, any, hds = setdiscovery.findcommonheads(
1148 common, any, hds = setdiscovery.findcommonheads(
1149 ui, repo, remote, ancestorsof=nodes, audit=data
1149 ui, repo, remote, ancestorsof=nodes, audit=data
1150 )
1150 )
1151 return common, hds
1151 return common, hds
1152
1152
1153 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1153 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1154 localrevs = opts[b'rev']
1154 localrevs = opts[b'rev']
1155
1155
1156 fm = ui.formatter(b'debugdiscovery', opts)
1156 fm = ui.formatter(b'debugdiscovery', opts)
1157 if fm.strict_format:
1157 if fm.strict_format:
1158
1158
1159 @contextlib.contextmanager
1159 @contextlib.contextmanager
1160 def may_capture_output():
1160 def may_capture_output():
1161 ui.pushbuffer()
1161 ui.pushbuffer()
1162 yield
1162 yield
1163 data[b'output'] = ui.popbuffer()
1163 data[b'output'] = ui.popbuffer()
1164
1164
1165 else:
1165 else:
1166 may_capture_output = util.nullcontextmanager
1166 may_capture_output = util.nullcontextmanager
1167 with may_capture_output():
1167 with may_capture_output():
1168 with util.timedcm('debug-discovery') as t:
1168 with util.timedcm('debug-discovery') as t:
1169 common, hds = doit(localrevs, remoterevs)
1169 common, hds = doit(localrevs, remoterevs)
1170
1170
1171 # compute all statistics
1171 # compute all statistics
1172 heads_common = set(common)
1172 heads_common = set(common)
1173 heads_remote = set(hds)
1173 heads_remote = set(hds)
1174 heads_local = set(repo.heads())
1174 heads_local = set(repo.heads())
1175 # note: they cannot be a local or remote head that is in common and not
1175 # note: they cannot be a local or remote head that is in common and not
1176 # itself a head of common.
1176 # itself a head of common.
1177 heads_common_local = heads_common & heads_local
1177 heads_common_local = heads_common & heads_local
1178 heads_common_remote = heads_common & heads_remote
1178 heads_common_remote = heads_common & heads_remote
1179 heads_common_both = heads_common & heads_remote & heads_local
1179 heads_common_both = heads_common & heads_remote & heads_local
1180
1180
1181 all = repo.revs(b'all()')
1181 all = repo.revs(b'all()')
1182 common = repo.revs(b'::%ln', common)
1182 common = repo.revs(b'::%ln', common)
1183 roots_common = repo.revs(b'roots(::%ld)', common)
1183 roots_common = repo.revs(b'roots(::%ld)', common)
1184 missing = repo.revs(b'not ::%ld', common)
1184 missing = repo.revs(b'not ::%ld', common)
1185 heads_missing = repo.revs(b'heads(%ld)', missing)
1185 heads_missing = repo.revs(b'heads(%ld)', missing)
1186 roots_missing = repo.revs(b'roots(%ld)', missing)
1186 roots_missing = repo.revs(b'roots(%ld)', missing)
1187 assert len(common) + len(missing) == len(all)
1187 assert len(common) + len(missing) == len(all)
1188
1188
1189 initial_undecided = repo.revs(
1189 initial_undecided = repo.revs(
1190 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1190 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1191 )
1191 )
1192 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1192 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1193 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1193 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1194 common_initial_undecided = initial_undecided & common
1194 common_initial_undecided = initial_undecided & common
1195 missing_initial_undecided = initial_undecided & missing
1195 missing_initial_undecided = initial_undecided & missing
1196
1196
1197 data[b'elapsed'] = t.elapsed
1197 data[b'elapsed'] = t.elapsed
1198 data[b'nb-common-heads'] = len(heads_common)
1198 data[b'nb-common-heads'] = len(heads_common)
1199 data[b'nb-common-heads-local'] = len(heads_common_local)
1199 data[b'nb-common-heads-local'] = len(heads_common_local)
1200 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1200 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1201 data[b'nb-common-heads-both'] = len(heads_common_both)
1201 data[b'nb-common-heads-both'] = len(heads_common_both)
1202 data[b'nb-common-roots'] = len(roots_common)
1202 data[b'nb-common-roots'] = len(roots_common)
1203 data[b'nb-head-local'] = len(heads_local)
1203 data[b'nb-head-local'] = len(heads_local)
1204 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1204 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1205 data[b'nb-head-remote'] = len(heads_remote)
1205 data[b'nb-head-remote'] = len(heads_remote)
1206 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1206 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1207 heads_common_remote
1207 heads_common_remote
1208 )
1208 )
1209 data[b'nb-revs'] = len(all)
1209 data[b'nb-revs'] = len(all)
1210 data[b'nb-revs-common'] = len(common)
1210 data[b'nb-revs-common'] = len(common)
1211 data[b'nb-revs-missing'] = len(missing)
1211 data[b'nb-revs-missing'] = len(missing)
1212 data[b'nb-missing-heads'] = len(heads_missing)
1212 data[b'nb-missing-heads'] = len(heads_missing)
1213 data[b'nb-missing-roots'] = len(roots_missing)
1213 data[b'nb-missing-roots'] = len(roots_missing)
1214 data[b'nb-ini_und'] = len(initial_undecided)
1214 data[b'nb-ini_und'] = len(initial_undecided)
1215 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1215 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1216 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1216 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1217 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1217 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1218 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1218 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1219
1219
1220 fm.startitem()
1220 fm.startitem()
1221 fm.data(**pycompat.strkwargs(data))
1221 fm.data(**pycompat.strkwargs(data))
1222 # display discovery summary
1222 # display discovery summary
1223 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1223 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1224 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1224 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1225 fm.plain(b"heads summary:\n")
1225 fm.plain(b"heads summary:\n")
1226 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1226 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1227 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1227 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1228 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1228 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1229 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1229 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1230 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1230 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1231 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1231 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1232 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1232 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1233 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1233 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1234 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1234 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1235 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1235 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1236 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1236 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1237 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1237 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1238 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1238 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1239 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1239 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1240 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1240 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1241 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1241 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1242 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1242 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1243 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1243 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1244 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1244 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1245 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1245 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1246 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1246 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1247 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1247 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1248
1248
1249 if ui.verbose:
1249 if ui.verbose:
1250 fm.plain(
1250 fm.plain(
1251 b"common heads: %s\n"
1251 b"common heads: %s\n"
1252 % b" ".join(sorted(short(n) for n in heads_common))
1252 % b" ".join(sorted(short(n) for n in heads_common))
1253 )
1253 )
1254 fm.end()
1254 fm.end()
1255
1255
1256
1256
# Size (in bytes) of the read/write buffer used when streaming data,
# e.g. by `debugdownload`: 4 KiB.
_chunksize = 4 << 10
1258
1258
1259
1259
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through Mercurial's url handling (so proxy and
    auth configuration apply) and copied in ``_chunksize`` chunks either to
    the ui (when no ``--output`` is given) or to the named output file.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Always release the network handle (previously leaked), and close
        # the output file — but never the ui — even if the copy failed.
        fh.close()
        if output:
            dest.close()
1282
1282
1283
1283
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate where the extension was loaded from, when determinable
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) builds have no __file__; point at the binary
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # in default (non-quiet, non-verbose) mode, annotate the name with
        # a compatibility note derived from the extension's testedwith list
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1345
1345
1346
1346
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Parses *expr* through the fileset language pipeline (parsed ->
    analyzed -> optimized), optionally printing the tree after each
    requested stage, then prints every candidate file matched by the
    resulting matcher.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the three translation stages, applied in order below
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # suppress the header line in the legacy --verbose-only mode
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate files the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include the working directory (walked with unknown/ignored files)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # --show-matcher defaults to None, so plain --verbose also triggers it
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1442
1442
1443
1443
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report conflicts with --from-report and --dry-run; reject early
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # all of the actual detection/repair logic lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1516
1516
1517
1517
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # widest variant name determines column width for aligned plain output
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the "name:" column to maxvariantlength
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes values (anything with startswith) pass through verbatim;
            # everything else is rendered as a yes/no boolean
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json, template, ...) keep raw values
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels describing how the repo value relates to config/default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns are only shown with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1588
1588
1589
1589
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def _yesno(flag):
        # render a boolean probe result as b'yes'/b'no'
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % _yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % _yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % _yesno(util.checknlink(path)))
    # probing case sensitivity needs a scratch file; if that fails (e.g.
    # read-only or missing directory) report '(unknown)' instead of aborting
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = _yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1612
1612
1613
1613
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # translate the hex node ids from the command line into binary nodes
    getbundleargs = {}
    if common:
        getbundleargs['common'] = [bin(s) for s in common]
    if head:
        getbundleargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    getbundleargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **getbundleargs)

    # map the user-facing compression name to the on-disk bundle header
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1660
1660
1661
1661
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                # The file itself matches an ignore pattern.
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # Otherwise, check whether a containing directory matches.
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break

        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue

        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1710
1710
1711
1711
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # Full hashes in debug mode, short ones otherwise.
    shortfn = hex if ui.debugflag else short

    # Column width for node ids: probe the first revision, default to 12.
    idlen = 12
    for probe in store:
        idlen = len(shortfn(store.node(probe)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1751
1751
1752
1752
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)
        # One edge from each parent; the second parent is only drawn when
        # it is not the null node.
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1771
1771
1772
1772
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Result intentionally discarded — this call exercises the index
    # before we inspect it.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1782
1782
1783
1783
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a series of sanity checks (encoding, Python, compiled modules,
    templates, editor, username, extensions) and reports each result via
    the formatter.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # Count of detected problems; also the return value.
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    # condwrite only emits when its first argument is truthy.
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # Locate the Python standard library; under an oxidized (PyOxidizer)
    # build there is no os.__file__, so fall back to the executable path.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    # The version string may carry a "+" suffix for custom builds; report
    # the base version and the suffix separately.
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # Determine which accelerated extensions the module policy requires,
    # then try to actually import them to confirm they work.
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    # p doubles as "templates are usable" flag: it is cleared to None when
    # the default template is missing or fails to load.
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the implicit default editor, so its absence gets a softer
    # message and does not count as a problem.
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # Let loaded extensions contribute their own checks; each handler
    # returns its own problem count.
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2082
2082
2083
2083
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(nodeid) for nodeid in ids])
    # One character per queried id: "1" for known, "0" for unknown.
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
2097
2097
2098
2098
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so ancient shell-completion scripts keep working; simply
    # forwards to debugnamecomplete (defined elsewhere in this module).
    debugnamecomplete(ui, repo, *args)
2103
2103
2104
2104
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get('force_free_lock'):
        # tryunlink ignores a missing file (ENOENT), so force-freeing a
        # lock that is not held is a harmless no-op instead of a crash.
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # non-blocking acquisition: fail rather than wait
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the lock(s) until the user (or a signal) responds.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock; return 1 if held, 0 if free."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We were able to take the lock, so it was free; release it.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # A vanished lock file just means the lock is free.
                if e.errno != errno.ENOENT:
                    raise

            ui.writenoi18n(b"%-6s free\n" % (name + b":"))
            return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2216
2216
2217
2217
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache only exists on revlog-based manifest storage;
        # abort with a clear message for storage backends without one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Clearing rewrites the on-disk cache file, so take the wlock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # No action requested: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2291
2291
2292
2292
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # Report which on-disk merge-state format (v1 or v2) is in effect.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template: commits, then per-file merge records with their
        # extras, then file-level extras not covered by a merge record.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local/other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the tuple layout of ms._state[f] depends on
    # the record type checked below.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2400
2400
2401
2401
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # An empty argument list means "complete everything".
    prefixes = args if args else [b'']
    matches = set()
    for prefix in prefixes:
        for candidate in candidates:
            if candidate.startswith(prefix):
                matches.add(candidate)
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2424
2424
2425
2425
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Emit a freshly computed persistent nodemap for the changelog.
        cl = repo.unfiltered().changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            binary = cl.index.nodemap_data_all()
        else:
            binary = nodemap.persistent_data(cl.index)
        ui.write(binary)
    elif opts['dump_disk']:
        # Emit the raw bytes of the persisted nodemap, if any.
        cl = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, data = persisted
            ui.write(data[:])
    elif opts['check']:
        # Validate the persisted nodemap against the in-memory index.
        cl = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, data = persisted
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Display the docket (metadata) of the persisted nodemap.
        cl = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, data = persisted
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2487
2487
2488
2488
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into binary; reject anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete: remove markers by index instead of creating/listing.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker precursor -> successors.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally filtered by --rev.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices must be computed over the full marker list even when
            # only a filtered subset is displayed.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2638
2638
2639
2639
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the revision (working directory when --rev is omitted) and
    # print each recorded copy as "source -> destination".
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for destination in copymap:
        ui.write(b'%s -> %s\n' % (copymap[destination], destination))
2652
2652
2653
2653
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # NOTE: this function was previously (mistakenly) also named
    # debugp1copies, shadowing the p1 variant defined above at module
    # level; the command table was unaffected (registration happens at
    # decoration time), but the module attribute was wrong. Renamed to
    # match the registered command.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2666
2666
2667
2667
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec. `acceptable` is a
        # bytestring of dirstate state characters to match against.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Spec points outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; convert the spec on platforms with
        # a different separator, and convert results back below.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator so only
                # one more path segment is completed.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # No state filter given means "accept all states".
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2736
2736
2737
2737
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, restrict to the matched files, then print
    # every recorded copy as "source -> destination".
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copymap = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for destination in sorted(copymap):
        ui.write(b'%s -> %s\n' % (copymap[destination], destination))
2751
2751
2752
2752
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always turned on here; its output only
    # becomes visible when --debug is in effect.
    logging_override = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        if is_local:
            ui.write(_(b'local: %s\n') % _(b'yes'))
        else:
            ui.write(_(b'local: %s\n') % _(b'no'))
        if pushable:
            ui.write(_(b'pushable: %s\n') % _(b'yes'))
        else:
            ui.write(_(b'pushable: %s\n') % _(b'no'))
    finally:
        peer.close()
2776
2776
2777
2777
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool takes top priority; install it as ui.forcemerge for the
    # duration of the examination (and report it, --verbose only).
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the remaining tool-selection inputs (--verbose only).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        # Walk the files of the requested context (working-copy parent
        # when no --rev is given) that match the patterns.
        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's own chatter unless --debug is set, in
            # which case its warnings are part of the requested output.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2862
2862
2863
2863
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace,
            # escaped so binary values stay printable.
            listing = peer.listkeys(namespace)
            for key, value in sorted(pycompat.iteritems(listing)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
            return

        # Update mode: attempt the old -> new transition for the key.
        key, old, new = keyinfo
        with peer.commandexecutor() as executor:
            result = executor.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(result) + b'\n')
        # Exit status 0 on success (truthy pushkey result).
        return not result
    finally:
        peer.close()
2899
2899
2900
2900
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Resolve both revisions and build their parent vectors.
    ctxa = scmutil.revsingle(repo, a)
    ctxb = scmutil.revsingle(repo, b)
    veca = pvec.ctxpvec(ctxa)
    vecb = pvec.ctxpvec(ctxb)
    # Classify the relation between the two vectors.
    # NOTE(review): if none of these comparisons holds, ``rel`` stays
    # unbound and the final write raises NameError; presumably the four
    # pvec relations (=, >, <, |) are exhaustive — confirm in pvec.py.
    if veca == vecb:
        rel = b"="
    elif veca > vecb:
        rel = b">"
    elif veca < vecb:
        rel = b"<"
    elif veca | vecb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % veca)
    ui.write(_(b"b: %s\n") % vecb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (veca._depth, vecb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(veca._depth - vecb._depth),
            pvec._hamming(veca._vec, vecb._vec),
            veca.distance(vecb),
            rel,
        )
    )
2927
2927
2928
2928
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below.
        changedfiles = None
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            # Files the manifest knows about but the dirstate does not.
            manifest_only = in_manifest - in_dirstate
            # Dirstate-only files, excluding ones marked as added (adds
            # must be preserved per the command documentation).
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f for f in dirstate_only if not dirstate.get_entry(f).added
            }
            changedfiles = manifest_only | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2976
2976
2977
2977
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate to repair; --only-data restricts the scan to .d files.
    byteopts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, byteopts.get(b"only_data"))
2994
2994
2995
2995
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    # Examine the requested context (working-copy parent by default).
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a falsy value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if renamed:
            srcpath, srcnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, srcpath, hex(srcnode))
            )
        else:
            ui.write(_(b"%s not renamed\n") % relpath)
3015
3015
3016
3016
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in stable (sorted) order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3022
3022
3023
3023
3024 @command(
3024 @command(
3025 b'debugrevlog',
3025 b'debugrevlog',
3026 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3026 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3027 _(b'-c|-m|FILE'),
3027 _(b'-c|-m|FILE'),
3028 optionalrepo=True,
3028 optionalrepo=True,
3029 )
3029 )
3030 def debugrevlog(ui, repo, file_=None, **opts):
3030 def debugrevlog(ui, repo, file_=None, **opts):
3031 """show data and statistics about a revlog"""
3031 """show data and statistics about a revlog"""
3032 opts = pycompat.byteskwargs(opts)
3032 opts = pycompat.byteskwargs(opts)
3033 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3033 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3034
3034
3035 if opts.get(b"dump"):
3035 if opts.get(b"dump"):
3036 numrevs = len(r)
3036 numrevs = len(r)
3037 ui.write(
3037 ui.write(
3038 (
3038 (
3039 b"# rev p1rev p2rev start end deltastart base p1 p2"
3039 b"# rev p1rev p2rev start end deltastart base p1 p2"
3040 b" rawsize totalsize compression heads chainlen\n"
3040 b" rawsize totalsize compression heads chainlen\n"
3041 )
3041 )
3042 )
3042 )
3043 ts = 0
3043 ts = 0
3044 heads = set()
3044 heads = set()
3045
3045
3046 for rev in pycompat.xrange(numrevs):
3046 for rev in pycompat.xrange(numrevs):
3047 dbase = r.deltaparent(rev)
3047 dbase = r.deltaparent(rev)
3048 if dbase == -1:
3048 if dbase == -1:
3049 dbase = rev
3049 dbase = rev
3050 cbase = r.chainbase(rev)
3050 cbase = r.chainbase(rev)
3051 clen = r.chainlen(rev)
3051 clen = r.chainlen(rev)
3052 p1, p2 = r.parentrevs(rev)
3052 p1, p2 = r.parentrevs(rev)
3053 rs = r.rawsize(rev)
3053 rs = r.rawsize(rev)
3054 ts = ts + rs
3054 ts = ts + rs
3055 heads -= set(r.parentrevs(rev))
3055 heads -= set(r.parentrevs(rev))
3056 heads.add(rev)
3056 heads.add(rev)
3057 try:
3057 try:
3058 compression = ts / r.end(rev)
3058 compression = ts / r.end(rev)
3059 except ZeroDivisionError:
3059 except ZeroDivisionError:
3060 compression = 0
3060 compression = 0
3061 ui.write(
3061 ui.write(
3062 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3062 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3063 b"%11d %5d %8d\n"
3063 b"%11d %5d %8d\n"
3064 % (
3064 % (
3065 rev,
3065 rev,
3066 p1,
3066 p1,
3067 p2,
3067 p2,
3068 r.start(rev),
3068 r.start(rev),
3069 r.end(rev),
3069 r.end(rev),
3070 r.start(dbase),
3070 r.start(dbase),
3071 r.start(cbase),
3071 r.start(cbase),
3072 r.start(p1),
3072 r.start(p1),
3073 r.start(p2),
3073 r.start(p2),
3074 rs,
3074 rs,
3075 ts,
3075 ts,
3076 compression,
3076 compression,
3077 len(heads),
3077 len(heads),
3078 clen,
3078 clen,
3079 )
3079 )
3080 )
3080 )
3081 return 0
3081 return 0
3082
3082
3083 format = r._format_version
3083 format = r._format_version
3084 v = r._format_flags
3084 v = r._format_flags
3085 flags = []
3085 flags = []
3086 gdelta = False
3086 gdelta = False
3087 if v & revlog.FLAG_INLINE_DATA:
3087 if v & revlog.FLAG_INLINE_DATA:
3088 flags.append(b'inline')
3088 flags.append(b'inline')
3089 if v & revlog.FLAG_GENERALDELTA:
3089 if v & revlog.FLAG_GENERALDELTA:
3090 gdelta = True
3090 gdelta = True
3091 flags.append(b'generaldelta')
3091 flags.append(b'generaldelta')
3092 if not flags:
3092 if not flags:
3093 flags = [b'(none)']
3093 flags = [b'(none)']
3094
3094
3095 ### tracks merge vs single parent
3095 ### tracks merge vs single parent
3096 nummerges = 0
3096 nummerges = 0
3097
3097
3098 ### tracks ways the "delta" are build
3098 ### tracks ways the "delta" are build
3099 # nodelta
3099 # nodelta
3100 numempty = 0
3100 numempty = 0
3101 numemptytext = 0
3101 numemptytext = 0
3102 numemptydelta = 0
3102 numemptydelta = 0
3103 # full file content
3103 # full file content
3104 numfull = 0
3104 numfull = 0
3105 # intermediate snapshot against a prior snapshot
3105 # intermediate snapshot against a prior snapshot
3106 numsemi = 0
3106 numsemi = 0
3107 # snapshot count per depth
3107 # snapshot count per depth
3108 numsnapdepth = collections.defaultdict(lambda: 0)
3108 numsnapdepth = collections.defaultdict(lambda: 0)
3109 # delta against previous revision
3109 # delta against previous revision
3110 numprev = 0
3110 numprev = 0
3111 # delta against first or second parent (not prev)
3111 # delta against first or second parent (not prev)
3112 nump1 = 0
3112 nump1 = 0
3113 nump2 = 0
3113 nump2 = 0
3114 # delta against neither prev nor parents
3114 # delta against neither prev nor parents
3115 numother = 0
3115 numother = 0
3116 # delta against prev that are also first or second parent
3116 # delta against prev that are also first or second parent
3117 # (details of `numprev`)
3117 # (details of `numprev`)
3118 nump1prev = 0
3118 nump1prev = 0
3119 nump2prev = 0
3119 nump2prev = 0
3120
3120
3121 # data about delta chain of each revs
3121 # data about delta chain of each revs
3122 chainlengths = []
3122 chainlengths = []
3123 chainbases = []
3123 chainbases = []
3124 chainspans = []
3124 chainspans = []
3125
3125
3126 # data about each revision
3126 # data about each revision
3127 datasize = [None, 0, 0]
3127 datasize = [None, 0, 0]
3128 fullsize = [None, 0, 0]
3128 fullsize = [None, 0, 0]
3129 semisize = [None, 0, 0]
3129 semisize = [None, 0, 0]
3130 # snapshot count per depth
3130 # snapshot count per depth
3131 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3131 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3132 deltasize = [None, 0, 0]
3132 deltasize = [None, 0, 0]
3133 chunktypecounts = {}
3133 chunktypecounts = {}
3134 chunktypesizes = {}
3134 chunktypesizes = {}
3135
3135
3136 def addsize(size, l):
3136 def addsize(size, l):
3137 if l[0] is None or size < l[0]:
3137 if l[0] is None or size < l[0]:
3138 l[0] = size
3138 l[0] = size
3139 if size > l[1]:
3139 if size > l[1]:
3140 l[1] = size
3140 l[1] = size
3141 l[2] += size
3141 l[2] += size
3142
3142
3143 numrevs = len(r)
3143 numrevs = len(r)
3144 for rev in pycompat.xrange(numrevs):
3144 for rev in pycompat.xrange(numrevs):
3145 p1, p2 = r.parentrevs(rev)
3145 p1, p2 = r.parentrevs(rev)
3146 delta = r.deltaparent(rev)
3146 delta = r.deltaparent(rev)
3147 if format > 0:
3147 if format > 0:
3148 addsize(r.rawsize(rev), datasize)
3148 addsize(r.rawsize(rev), datasize)
3149 if p2 != nullrev:
3149 if p2 != nullrev:
3150 nummerges += 1
3150 nummerges += 1
3151 size = r.length(rev)
3151 size = r.length(rev)
3152 if delta == nullrev:
3152 if delta == nullrev:
3153 chainlengths.append(0)
3153 chainlengths.append(0)
3154 chainbases.append(r.start(rev))
3154 chainbases.append(r.start(rev))
3155 chainspans.append(size)
3155 chainspans.append(size)
3156 if size == 0:
3156 if size == 0:
3157 numempty += 1
3157 numempty += 1
3158 numemptytext += 1
3158 numemptytext += 1
3159 else:
3159 else:
3160 numfull += 1
3160 numfull += 1
3161 numsnapdepth[0] += 1
3161 numsnapdepth[0] += 1
3162 addsize(size, fullsize)
3162 addsize(size, fullsize)
3163 addsize(size, snapsizedepth[0])
3163 addsize(size, snapsizedepth[0])
3164 else:
3164 else:
3165 chainlengths.append(chainlengths[delta] + 1)
3165 chainlengths.append(chainlengths[delta] + 1)
3166 baseaddr = chainbases[delta]
3166 baseaddr = chainbases[delta]
3167 revaddr = r.start(rev)
3167 revaddr = r.start(rev)
3168 chainbases.append(baseaddr)
3168 chainbases.append(baseaddr)
3169 chainspans.append((revaddr - baseaddr) + size)
3169 chainspans.append((revaddr - baseaddr) + size)
3170 if size == 0:
3170 if size == 0:
3171 numempty += 1
3171 numempty += 1
3172 numemptydelta += 1
3172 numemptydelta += 1
3173 elif r.issnapshot(rev):
3173 elif r.issnapshot(rev):
3174 addsize(size, semisize)
3174 addsize(size, semisize)
3175 numsemi += 1
3175 numsemi += 1
3176 depth = r.snapshotdepth(rev)
3176 depth = r.snapshotdepth(rev)
3177 numsnapdepth[depth] += 1
3177 numsnapdepth[depth] += 1
3178 addsize(size, snapsizedepth[depth])
3178 addsize(size, snapsizedepth[depth])
3179 else:
3179 else:
3180 addsize(size, deltasize)
3180 addsize(size, deltasize)
3181 if delta == rev - 1:
3181 if delta == rev - 1:
3182 numprev += 1
3182 numprev += 1
3183 if delta == p1:
3183 if delta == p1:
3184 nump1prev += 1
3184 nump1prev += 1
3185 elif delta == p2:
3185 elif delta == p2:
3186 nump2prev += 1
3186 nump2prev += 1
3187 elif delta == p1:
3187 elif delta == p1:
3188 nump1 += 1
3188 nump1 += 1
3189 elif delta == p2:
3189 elif delta == p2:
3190 nump2 += 1
3190 nump2 += 1
3191 elif delta != nullrev:
3191 elif delta != nullrev:
3192 numother += 1
3192 numother += 1
3193
3193
3194 # Obtain data on the raw chunks in the revlog.
3194 # Obtain data on the raw chunks in the revlog.
3195 if util.safehasattr(r, b'_getsegmentforrevs'):
3195 if util.safehasattr(r, b'_getsegmentforrevs'):
3196 segment = r._getsegmentforrevs(rev, rev)[1]
3196 segment = r._getsegmentforrevs(rev, rev)[1]
3197 else:
3197 else:
3198 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3198 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3199 if segment:
3199 if segment:
3200 chunktype = bytes(segment[0:1])
3200 chunktype = bytes(segment[0:1])
3201 else:
3201 else:
3202 chunktype = b'empty'
3202 chunktype = b'empty'
3203
3203
3204 if chunktype not in chunktypecounts:
3204 if chunktype not in chunktypecounts:
3205 chunktypecounts[chunktype] = 0
3205 chunktypecounts[chunktype] = 0
3206 chunktypesizes[chunktype] = 0
3206 chunktypesizes[chunktype] = 0
3207
3207
3208 chunktypecounts[chunktype] += 1
3208 chunktypecounts[chunktype] += 1
3209 chunktypesizes[chunktype] += size
3209 chunktypesizes[chunktype] += size
3210
3210
3211 # Adjust size min value for empty cases
3211 # Adjust size min value for empty cases
3212 for size in (datasize, fullsize, semisize, deltasize):
3212 for size in (datasize, fullsize, semisize, deltasize):
3213 if size[0] is None:
3213 if size[0] is None:
3214 size[0] = 0
3214 size[0] = 0
3215
3215
3216 numdeltas = numrevs - numfull - numempty - numsemi
3216 numdeltas = numrevs - numfull - numempty - numsemi
3217 numoprev = numprev - nump1prev - nump2prev
3217 numoprev = numprev - nump1prev - nump2prev
3218 totalrawsize = datasize[2]
3218 totalrawsize = datasize[2]
3219 datasize[2] /= numrevs
3219 datasize[2] /= numrevs
3220 fulltotal = fullsize[2]
3220 fulltotal = fullsize[2]
3221 if numfull == 0:
3221 if numfull == 0:
3222 fullsize[2] = 0
3222 fullsize[2] = 0
3223 else:
3223 else:
3224 fullsize[2] /= numfull
3224 fullsize[2] /= numfull
3225 semitotal = semisize[2]
3225 semitotal = semisize[2]
3226 snaptotal = {}
3226 snaptotal = {}
3227 if numsemi > 0:
3227 if numsemi > 0:
3228 semisize[2] /= numsemi
3228 semisize[2] /= numsemi
3229 for depth in snapsizedepth:
3229 for depth in snapsizedepth:
3230 snaptotal[depth] = snapsizedepth[depth][2]
3230 snaptotal[depth] = snapsizedepth[depth][2]
3231 snapsizedepth[depth][2] /= numsnapdepth[depth]
3231 snapsizedepth[depth][2] /= numsnapdepth[depth]
3232
3232
3233 deltatotal = deltasize[2]
3233 deltatotal = deltasize[2]
3234 if numdeltas > 0:
3234 if numdeltas > 0:
3235 deltasize[2] /= numdeltas
3235 deltasize[2] /= numdeltas
3236 totalsize = fulltotal + semitotal + deltatotal
3236 totalsize = fulltotal + semitotal + deltatotal
3237 avgchainlen = sum(chainlengths) / numrevs
3237 avgchainlen = sum(chainlengths) / numrevs
3238 maxchainlen = max(chainlengths)
3238 maxchainlen = max(chainlengths)
3239 maxchainspan = max(chainspans)
3239 maxchainspan = max(chainspans)
3240 compratio = 1
3240 compratio = 1
3241 if totalsize:
3241 if totalsize:
3242 compratio = totalrawsize / totalsize
3242 compratio = totalrawsize / totalsize
3243
3243
3244 basedfmtstr = b'%%%dd\n'
3244 basedfmtstr = b'%%%dd\n'
3245 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3245 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3246
3246
3247 def dfmtstr(max):
3247 def dfmtstr(max):
3248 return basedfmtstr % len(str(max))
3248 return basedfmtstr % len(str(max))
3249
3249
3250 def pcfmtstr(max, padding=0):
3250 def pcfmtstr(max, padding=0):
3251 return basepcfmtstr % (len(str(max)), b' ' * padding)
3251 return basepcfmtstr % (len(str(max)), b' ' * padding)
3252
3252
3253 def pcfmt(value, total):
3253 def pcfmt(value, total):
3254 if total:
3254 if total:
3255 return (value, 100 * float(value) / total)
3255 return (value, 100 * float(value) / total)
3256 else:
3256 else:
3257 return value, 100.0
3257 return value, 100.0
3258
3258
3259 ui.writenoi18n(b'format : %d\n' % format)
3259 ui.writenoi18n(b'format : %d\n' % format)
3260 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3260 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3261
3261
3262 ui.write(b'\n')
3262 ui.write(b'\n')
3263 fmt = pcfmtstr(totalsize)
3263 fmt = pcfmtstr(totalsize)
3264 fmt2 = dfmtstr(totalsize)
3264 fmt2 = dfmtstr(totalsize)
3265 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3265 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3266 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3266 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3267 ui.writenoi18n(
3267 ui.writenoi18n(
3268 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3268 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3269 )
3269 )
3270 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3270 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3271 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3271 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3272 ui.writenoi18n(
3272 ui.writenoi18n(
3273 b' text : '
3273 b' text : '
3274 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3274 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3275 )
3275 )
3276 ui.writenoi18n(
3276 ui.writenoi18n(
3277 b' delta : '
3277 b' delta : '
3278 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3278 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3279 )
3279 )
3280 ui.writenoi18n(
3280 ui.writenoi18n(
3281 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3281 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3282 )
3282 )
3283 for depth in sorted(numsnapdepth):
3283 for depth in sorted(numsnapdepth):
3284 ui.write(
3284 ui.write(
3285 (b' lvl-%-3d : ' % depth)
3285 (b' lvl-%-3d : ' % depth)
3286 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3286 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3287 )
3287 )
3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3289 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3289 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3290 ui.writenoi18n(
3290 ui.writenoi18n(
3291 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3291 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3292 )
3292 )
3293 for depth in sorted(numsnapdepth):
3293 for depth in sorted(numsnapdepth):
3294 ui.write(
3294 ui.write(
3295 (b' lvl-%-3d : ' % depth)
3295 (b' lvl-%-3d : ' % depth)
3296 + fmt % pcfmt(snaptotal[depth], totalsize)
3296 + fmt % pcfmt(snaptotal[depth], totalsize)
3297 )
3297 )
3298 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3298 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3299
3299
3300 def fmtchunktype(chunktype):
3300 def fmtchunktype(chunktype):
3301 if chunktype == b'empty':
3301 if chunktype == b'empty':
3302 return b' %s : ' % chunktype
3302 return b' %s : ' % chunktype
3303 elif chunktype in pycompat.bytestr(string.ascii_letters):
3303 elif chunktype in pycompat.bytestr(string.ascii_letters):
3304 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3304 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3305 else:
3305 else:
3306 return b' 0x%s : ' % hex(chunktype)
3306 return b' 0x%s : ' % hex(chunktype)
3307
3307
3308 ui.write(b'\n')
3308 ui.write(b'\n')
3309 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3309 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3310 for chunktype in sorted(chunktypecounts):
3310 for chunktype in sorted(chunktypecounts):
3311 ui.write(fmtchunktype(chunktype))
3311 ui.write(fmtchunktype(chunktype))
3312 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3312 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3313 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3313 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3314 for chunktype in sorted(chunktypecounts):
3314 for chunktype in sorted(chunktypecounts):
3315 ui.write(fmtchunktype(chunktype))
3315 ui.write(fmtchunktype(chunktype))
3316 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3316 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3317
3317
3318 ui.write(b'\n')
3318 ui.write(b'\n')
3319 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3319 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3320 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3320 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3321 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3321 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3322 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3322 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3323 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3323 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3324
3324
3325 if format > 0:
3325 if format > 0:
3326 ui.write(b'\n')
3326 ui.write(b'\n')
3327 ui.writenoi18n(
3327 ui.writenoi18n(
3328 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3328 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3329 % tuple(datasize)
3329 % tuple(datasize)
3330 )
3330 )
3331 ui.writenoi18n(
3331 ui.writenoi18n(
3332 b'full revision size (min/max/avg) : %d / %d / %d\n'
3332 b'full revision size (min/max/avg) : %d / %d / %d\n'
3333 % tuple(fullsize)
3333 % tuple(fullsize)
3334 )
3334 )
3335 ui.writenoi18n(
3335 ui.writenoi18n(
3336 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3336 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3337 % tuple(semisize)
3337 % tuple(semisize)
3338 )
3338 )
3339 for depth in sorted(snapsizedepth):
3339 for depth in sorted(snapsizedepth):
3340 if depth == 0:
3340 if depth == 0:
3341 continue
3341 continue
3342 ui.writenoi18n(
3342 ui.writenoi18n(
3343 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3343 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3344 % ((depth,) + tuple(snapsizedepth[depth]))
3344 % ((depth,) + tuple(snapsizedepth[depth]))
3345 )
3345 )
3346 ui.writenoi18n(
3346 ui.writenoi18n(
3347 b'delta size (min/max/avg) : %d / %d / %d\n'
3347 b'delta size (min/max/avg) : %d / %d / %d\n'
3348 % tuple(deltasize)
3348 % tuple(deltasize)
3349 )
3349 )
3350
3350
3351 if numdeltas > 0:
3351 if numdeltas > 0:
3352 ui.write(b'\n')
3352 ui.write(b'\n')
3353 fmt = pcfmtstr(numdeltas)
3353 fmt = pcfmtstr(numdeltas)
3354 fmt2 = pcfmtstr(numdeltas, 4)
3354 fmt2 = pcfmtstr(numdeltas, 4)
3355 ui.writenoi18n(
3355 ui.writenoi18n(
3356 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3356 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3357 )
3357 )
3358 if numprev > 0:
3358 if numprev > 0:
3359 ui.writenoi18n(
3359 ui.writenoi18n(
3360 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3360 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3361 )
3361 )
3362 ui.writenoi18n(
3362 ui.writenoi18n(
3363 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3363 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3364 )
3364 )
3365 ui.writenoi18n(
3365 ui.writenoi18n(
3366 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3366 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3367 )
3367 )
3368 if gdelta:
3368 if gdelta:
3369 ui.writenoi18n(
3369 ui.writenoi18n(
3370 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3370 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3371 )
3371 )
3372 ui.writenoi18n(
3372 ui.writenoi18n(
3373 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3373 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3374 )
3374 )
3375 ui.writenoi18n(
3375 ui.writenoi18n(
3376 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3376 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3377 )
3377 )
3378
3378
3379
3379
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Prints one line per revision of the selected revlog (changelog,
    manifest, or a file revlog chosen via the shared debugrevlogopts).
    Two layouts are supported: format 0 (legacy v0-style columns) and
    format 1 (flags/size columns); --verbose adds offset/length detail.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full 40-hex nodeids, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # All node ids have the same printed width; measure the first one.
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Damaged index entries still get a row; fall back to
                # null parents rather than aborting the whole dump.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not nodeids.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3493
3493
3494
3494
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset pipeline: each stage takes the tree from the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # showalways: stages always printed; showchanged: printed only when the
    # tree differs from the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree so that
    # --verify-optimized can re-evaluate the 'analyzed' stage below.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                # Omit the stage label for the bare --verbose 'parsed' dump
                # to keep historical output stable.
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the unoptimized (analyzed) and optimized trees and
        # diff the resulting revision lists; any difference is a bug in the
        # optimizer.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the mismatch as a unified-diff-like listing.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal (non-verify) path: evaluate the final tree and print results.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3626
3626
3627
3627
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # The two logging destinations are mutually exclusive.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    iolog = None
    fdspec = opts[b'logiofd']
    if fdspec:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            iolog = os.fdopen(int(fdspec), 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            iolog = os.fdopen(int(fdspec), 'wb', 0)
    elif opts[b'logiofile']:
        iolog = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=iolog)
    server.serve_forever()
3676
3676
3677
3677
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use this if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of these people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes; rev2 defaults to the null revision
    # when omitted, clearing the second parent.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parents are rewritten; the working directory
    # contents are deliberately left untouched.
    with repo.wlock():
        repo.setparents(node1, node2)
3705
3705
3706
3706
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the revlog is implied, so the sole positional
    # argument is the revision rather than a file path.
    implied_storage = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if implied_storage:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))

    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Reach through wrappers to the underlying revlog when present.
    storage = getattr(storage, '_revlog', storage)
    try:
        sidedata = storage.sidedata(storage.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)

    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3733
3733
3734
3734
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes we know how to talk TLS over; fill in the standard
    # port when the URL does not carry one.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12.
    # Build the equivalent socket through SSLContext instead. Certificate
    # verification is intentionally disabled here: the point is to fetch
    # whatever chain the server presents so win32.checkcertificatechain()
    # can evaluate (and, if needed, repair) it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # binary_form=True: the DER bytes are what the win32 API expects.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass: inspect only. build=False avoids touching the
        # certificate store when the chain is already complete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second pass (build=True) asks Windows to fetch the missing
            # intermediates/root via Windows Update.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3806
3806
3807
3807
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle and sort newest-first so recent
    # backups are listed (and, with --recover, searched) before older ones.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize options read further down by the incoming machinery
    # (getremotechanges) that have no meaning for this command.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets of one bundle, honoring the standard
        # log options (--newest-first, --no-merges, --limit).
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the node is already present locally.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision we no longer have;
            # warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Suppress the incoming machinery's own chatter; this command
        # produces its own output below.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # First bundle containing the node wins; stop here.
                        break
            else:
                # Listing mode: print the bundle's mtime as a header, then
                # either the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # getremotechanges may have set up a temporary bundle repo;
            # always tear it down.
            cleanupfn()
3948
3948
3949
3949
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, pinned revision) recorded
    # in the given changeset, sorted by subrepo path.
    target = scmutil.revsingle(repo, rev, None)
    for subpath, substate in sorted(target.substate.items()):
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % substate[0])
        ui.writenoi18n(b' revision %s\n' % substate[1])
3961
3961
3962
3962
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily so the interpreter machinery is only loaded when the
    # command actually runs.
    import code

    local_namespace = {
        'ui': ui,
        'repo': repo,
    }
    code.interact(local=local_namespace)
3978
3978
3979
3979
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

    <rev1>
    <successors-1A>
    <rev2>
    <successors-2A>
    <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Cache shared across successorssets() calls so repeated computation
    # over overlapping obsolescence chains is avoided.
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for successors in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # An empty set still produces a (blank) line, marking a pruned
            # changeset.
            if successors:
                ui.write(b' ')
                ui.write(b' '.join(node2str(n) for n in successors))
            ui.write(b'\n')
4034
4034
4035
4035
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what the cache already holds,
        # never trigger a (potentially expensive) recomputation.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            shown = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                # Cached fnode does not exist in the .hgtags filelog.
                shown += b' (unknown node)'
        elif tagsnode is None:
            shown = b'missing'
        else:
            # Falsy but not None: the cache slot holds garbage.
            shown = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), shown))
4054
4054
4055
4055
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev only makes sense inside a repository; fail loudly instead of
        # silently ignoring the option.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Extra template keywords supplied on the command line (-D KEY=VALUE).
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            # 'ui' is a reserved resource name; an empty key is malformed.
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            # Also reached when split() yields fewer than two parts.
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the raw parse tree, and the alias-expanded tree only when
        # expansion actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once against the -D supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4119
4119
4120
4120
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # A None response means the ui layer fell back to its default.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4135
4135
4136
4136
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4149
4149
4150
4150
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Same acquisition order as the original: working-copy lock first,
    # then the store lock, as cache updates may touch both.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4156
4156
4157
4157
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate requested optimizations before handing off to the
    # upgrade machinery, which does all of the real work.
    requested = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=requested, backup=backup, **opts
    )
4207
4207
4208
4208
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return
    # Optionally rewrite OS-specific separators to '/' for display.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        render = util.normpath
    else:
        render = lambda fn: fn
    # Column widths sized to the longest repo-relative and cwd-relative paths.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(abs) for abs in matched),
        max(len(repo.pathto(abs)) for abs in matched),
    )
    for abs in matched:
        line = fmt % (
            abs,
            render(repo.pathto(abs)),
            b'exact' if matcher.exact(abs) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4235
4235
4236
4236
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render any divergent nodes as "<hex> (<phase>)" pairs, with a
        # trailing space so they abut the reason text cleanly.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4254
4254
4255
4255
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the command's own flags
        # should travel over the wire.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Keep only options that were actually set.
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4286
4286
4287
4287
4288 def _parsewirelangblocks(fh):
4288 def _parsewirelangblocks(fh):
4289 activeaction = None
4289 activeaction = None
4290 blocklines = []
4290 blocklines = []
4291 lastindent = 0
4291 lastindent = 0
4292
4292
4293 for line in fh:
4293 for line in fh:
4294 line = line.rstrip()
4294 line = line.rstrip()
4295 if not line:
4295 if not line:
4296 continue
4296 continue
4297
4297
4298 if line.startswith(b'#'):
4298 if line.startswith(b'#'):
4299 continue
4299 continue
4300
4300
4301 if not line.startswith(b' '):
4301 if not line.startswith(b' '):
4302 # New block. Flush previous one.
4302 # New block. Flush previous one.
4303 if activeaction:
4303 if activeaction:
4304 yield activeaction, blocklines
4304 yield activeaction, blocklines
4305
4305
4306 activeaction = line
4306 activeaction = line
4307 blocklines = []
4307 blocklines = []
4308 lastindent = 0
4308 lastindent = 0
4309 continue
4309 continue
4310
4310
4311 # Else we start with an indent.
4311 # Else we start with an indent.
4312
4312
4313 if not activeaction:
4313 if not activeaction:
4314 raise error.Abort(_(b'indented line outside of block'))
4314 raise error.Abort(_(b'indented line outside of block'))
4315
4315
4316 indent = len(line) - len(line.lstrip())
4316 indent = len(line) - len(line.lstrip())
4317
4317
4318 # If this line is indented more than the last line, concatenate it.
4318 # If this line is indented more than the last line, concatenate it.
4319 if indent > lastindent and blocklines:
4319 if indent > lastindent and blocklines:
4320 blocklines[-1] += line.lstrip()
4320 blocklines[-1] += line.lstrip()
4321 else:
4321 else:
4322 blocklines.append(line)
4322 blocklines.append(line)
4323 lastindent = indent
4323 lastindent = indent
4324
4324
4325 # Flush last block.
4325 # Flush last block.
4326 if activeaction:
4326 if activeaction:
4327 yield activeaction, blocklines
4327 yield activeaction, blocklines
4328
4328
4329
4329
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``ssh1``. ``raw`` instances only allow sending raw data payloads and
    don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
        namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'ssh1',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw" and "ssh1"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = urlutil.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                ui.status(
                    _(b'response: %s\n')
                    % stringutil.pprint(res, bprefix=True, indent=2)
                )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # BUGFIX: split() returns a list; we must open the path
                    # component ([1]), not the list itself, or open() raises
                    # TypeError for every BODYFILE argument.
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now