##// END OF EJS Templates
dirstate-item: use `added` in debugrebuilddirstate...
marmoute -
r48913:78e66649 default
parent child Browse files
Show More
@@ -1,4923 +1,4923 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import codecs
11 import codecs
12 import collections
12 import collections
13 import contextlib
13 import contextlib
14 import difflib
14 import difflib
15 import errno
15 import errno
16 import glob
16 import glob
17 import operator
17 import operator
18 import os
18 import os
19 import platform
19 import platform
20 import random
20 import random
21 import re
21 import re
22 import socket
22 import socket
23 import ssl
23 import ssl
24 import stat
24 import stat
25 import string
25 import string
26 import subprocess
26 import subprocess
27 import sys
27 import sys
28 import time
28 import time
29
29
30 from .i18n import _
30 from .i18n import _
31 from .node import (
31 from .node import (
32 bin,
32 bin,
33 hex,
33 hex,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 wireprotov2peer,
94 wireprotov2peer,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .utils import (
97 from .utils import (
98 cborutil,
98 cborutil,
99 compression,
99 compression,
100 dateutil,
100 dateutil,
101 procutil,
101 procutil,
102 stringutil,
102 stringutil,
103 urlutil,
103 urlutil,
104 )
104 )
105
105
106 from .revlogutils import (
106 from .revlogutils import (
107 deltas as deltautil,
107 deltas as deltautil,
108 nodemap,
108 nodemap,
109 rewrite,
109 rewrite,
110 sidedata,
110 sidedata,
111 )
111 )
112
112
# Convenience alias for lockmod.release, used throughout this module.
release = lockmod.release

# Command table for all debug* commands.  We start from the table of the
# `strip` module so its commands are registered alongside ours, then build
# the @command decorator bound to that table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
118
118
119
119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    argcount = len(args)
    if argcount == 3:
        # An explicit index file was given: open it directly, without
        # requiring (or consulting) a repository.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        resolve = rlog.lookup
    elif argcount == 2:
        # Only two revisions: resolve them against the current repository's
        # changelog, which therefore must exist.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancestor), hex(ancestor)))
139
139
140
140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # This is a base85-armored version of the EICAR test file. See
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    payload = util.b85decode(
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as fp:
        fp.write(payload)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
156
156
157
157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the path (possibly remote), identify the bundle type, and let
    # the resulting unbundler replay itself into the repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
164
164
165
165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # The command only makes sense on a repository with no revisions yet.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: count the node elements so the progress
    # bar below can have a total.
    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create a commit for every node element, holding
    # both locks and a single transaction for the whole build.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the most recently committed node (-1: none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # nodeids[i] is the node committed for DAG id i
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the file contents of
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's lines so every rev changes the file.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                if len(ps) > 1:
                    # Merge: carry over the per-rev "nf*" files from p2 so
                    # they are not lost in the merged manifest.
                    if not p2:
                        p2 = repo[ps[1]]
                    for fn in p2:
                        if fn.startswith(b"nf"):
                            files.append(fn)
                            filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file data from `filecontent`,
                    # None for anything else.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag element: remember it, written out after the loop.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
341
341
342
342
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup `gen` at the given indentation

    Without `all`, only the changelog node hashes are printed; with `all`,
    every delta of every revlog in the changegroup is listed.
    """
    indent_string = b' ' * indent
    if not all:
        # Terse mode is only implemented for plain changegroups.
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
        return

    ui.writenoi18n(
        b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
        % indent_string
    )

    def showchunks(named):
        # Print one line per delta of the revlog section `named`.
        ui.write(b"\n%s%s\n" % (indent_string, named))
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(
                b"%s%s %s %s %s %s %d\n"
                % (
                    indent_string,
                    hex(node),
                    hex(p1),
                    hex(p2),
                    hex(cs),
                    hex(deltabase),
                    len(delta),
                )
            )

    gen.changelogheader()
    showchunks(b"changelog")
    gen.manifestheader()
    showchunks(b"manifest")
    # Filelog sections follow until an empty header is returned.
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata[b'filename'])
382
382
383
383
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown on-disk format: report it rather than crash.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (indent_string, exc.version, len(data))
        )
    else:
        ui.write(
            b"%sversion: %d (%d bytes)\n"
            % (indent_string, version, len(data))
        )
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            marker = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, marker)
        fm.end()
406
406
407
407
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'

    `data` is a binary phase-heads blob as found in a bundle2 'phase-heads'
    part; one line per head, indented by `indent` spaces, is printed.

    (The previous docstring was copy-pasted from _debugobsmarkers and
    wrongly talked about obsolescence markers.)
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
416
416
417
417
def _quasirepr(thing):
    """Return a repr-like byte string for `thing`.

    Mapping types are rendered with their keys in sorted order so the
    output is deterministic; everything else falls back to repr().
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
424
424
425
425
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was requested.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        header = b'%s -- %s (mandatory: %r)\n'
        ui.write((header % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # Build the unbundler unconditionally (it consumes the part);
            # only print its contents in non-quiet mode.
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
448
448
449
449
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec only prints the bundlespec; nothing else is inspected.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
472
472
473
473
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # Query before printing anything, so a failing peer produces no
        # partial output.
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
493
493
494
494
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)

    # Obtain the file-change record either by recomputing it (--compute) or
    # by decoding the changelog's sidedata, when present.
    files = None
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for fname in sorted(files.touched):
        # The most specific category wins; plain "touched" is the fallback.
        action = b"touched"
        for members, label in (
            (files.added, b"added"),
            (files.removed, b"removed"),
            (files.merged, b"merged"),
            (files.salvaged, b"salvaged"),
        ):
            if fname in members:
                action = label
                break

        # Copy information, if any: which parent it came from and from
        # which source path.
        copy_parent = b""
        copy_source = b""
        if fname in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[fname]
        elif fname in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[fname]

        ui.write(template % (action, copy_parent, fname, copy_source))
544
544
545
545
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    manifest1 = repo[parent1].manifest()
    manifest2 = repo[parent2].manifest()
    # Report every inconsistency found, then abort if there were any.
    problems = 0
    for err in repo.dirstate.verify(manifest1, manifest2):
        ui.warn(err[0] % err[1:])
        problems += 1
    if problems:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
559
559
560
560
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # With --style, list the configured styles; otherwise list raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
573
573
574
574
def _debugdisplaycolor(ui):
    # Work on a copy so the caller's ui keeps its configured styles; each
    # color is then mapped to itself so printing it demonstrates the effect.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.'):]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    def sortkey(item):
        # sort label with a '_' after the other to group '_background' entry.
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
591
591
592
592
def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Column width: the longest style name, so effect lists line up.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            padding = max(0, width - len(label))
            ui.write(b' ' * padding)
            rendered = [ui.label(effect, effect) for effect in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
606
606
607
607
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    secret_warning = _(
        b'(warning: stream clone bundle will contain secret '
        b'revisions)\n'
    )
    if phases.hassecret(repo):
        ui.warn(secret_warning)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
629
629
630
630
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Dump the DAG of an explicitly named revlog index file.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield a 'n' (node) event per revision with its non-null
            # parents, plus a 'l' (label) event for every revision the
            # user listed on the command line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No file given: emit the changelog DAG of the current repo.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged changelog rev to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Track the current branch so an 'a' (annotation) event is
            # only emitted when the branch actually changes.
            b = b"default"
            for r in cl:
                if branches:
                    # Branch name lives in the extras dict (field 5) of
                    # the changelog entry.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Serialize the event stream into dagtext lines and print them.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
700
700
701
701
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    named_revlog = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if named_revlog:
        # With -c/-m/--dir the sole positional argument is the revision.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
717
717
718
718
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With -e, allow the extended set of date formats when parsing.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
737
737
738
738
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    # Sparse-read support is optional on the revlog implementation.
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Per-revision stats drawn from the raw index tuple:
        # e[1] = compressed size, e[2] = uncompressed size,
        # e[3] = delta base rev, e[5]/e[6] = parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # Classify the delta by which revision it is based on.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta a delta can only be against the
            # previous revision (or be a full snapshot).
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # Total compressed size of every revision in the delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Column headers for the plain (non-templated) output.
    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Assign a small integer id to each distinct chain base.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Bytes spanned on disk from the chain base to the end of this rev.
        lineardist = revstart + comp - basestart
        # Bytes in that span that do NOT belong to this chain.
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        # Guard against division by zero for empty revisions/chains.
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain and measure how much
            # data would actually be fetched from disk.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
919
919
920
920
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates (deprecated) overrides --dates when explicitly given.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort primarily by saved mtime, then by filename.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 marks an unset mtime in the dirstate.
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # Symlink flag bit; display 'lnk' instead of octal permissions.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    # Append the recorded copy sources after the entry listing.
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
975
975
976
976
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 records an ignore-patterns hash; for v1 this
    # command intentionally prints nothing.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is stored at the tail of the docket's tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
991
991
992
992
993 @command(
993 @command(
994 b'debugdiscovery',
994 b'debugdiscovery',
995 [
995 [
996 (b'', b'old', None, _(b'use old-style discovery')),
996 (b'', b'old', None, _(b'use old-style discovery')),
997 (
997 (
998 b'',
998 b'',
999 b'nonheads',
999 b'nonheads',
1000 None,
1000 None,
1001 _(b'use old-style discovery with non-heads included'),
1001 _(b'use old-style discovery with non-heads included'),
1002 ),
1002 ),
1003 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1003 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1004 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1004 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1005 (
1005 (
1006 b'',
1006 b'',
1007 b'local-as-revs',
1007 b'local-as-revs',
1008 b"",
1008 b"",
1009 b'treat local has having these revisions only',
1009 b'treat local has having these revisions only',
1010 ),
1010 ),
1011 (
1011 (
1012 b'',
1012 b'',
1013 b'remote-as-revs',
1013 b'remote-as-revs',
1014 b"",
1014 b"",
1015 b'use local as remote, with only these these revisions',
1015 b'use local as remote, with only these these revisions',
1016 ),
1016 ),
1017 ]
1017 ]
1018 + cmdutil.remoteopts
1018 + cmdutil.remoteopts
1019 + cmdutil.formatteropts,
1019 + cmdutil.formatteropts,
1020 _(b'[--rev REV] [OTHER]'),
1020 _(b'[--rev REV] [OTHER]'),
1021 )
1021 )
1022 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1022 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1023 """runs the changeset discovery protocol in isolation
1023 """runs the changeset discovery protocol in isolation
1024
1024
1025 The local peer can be "replaced" by a subset of the local repository by
1025 The local peer can be "replaced" by a subset of the local repository by
1026 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1026 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1027 be "replaced" by a subset of the local repository using the
1027 be "replaced" by a subset of the local repository using the
1028 `--local-as-revs` flag. This is useful to efficiently debug pathological
1028 `--local-as-revs` flag. This is useful to efficiently debug pathological
1029 discovery situation.
1029 discovery situation.
1030
1030
1031 The following developer oriented config are relevant for people playing with this command:
1031 The following developer oriented config are relevant for people playing with this command:
1032
1032
1033 * devel.discovery.exchange-heads=True
1033 * devel.discovery.exchange-heads=True
1034
1034
1035 If False, the discovery will not start with
1035 If False, the discovery will not start with
1036 remote head fetching and local head querying.
1036 remote head fetching and local head querying.
1037
1037
1038 * devel.discovery.grow-sample=True
1038 * devel.discovery.grow-sample=True
1039
1039
1040 If False, the sample size used in set discovery will not be increased
1040 If False, the sample size used in set discovery will not be increased
1041 through the process
1041 through the process
1042
1042
1043 * devel.discovery.grow-sample.dynamic=True
1043 * devel.discovery.grow-sample.dynamic=True
1044
1044
1045 When discovery.grow-sample.dynamic is True, the default, the sample size is
1045 When discovery.grow-sample.dynamic is True, the default, the sample size is
1046 adapted to the shape of the undecided set (it is set to the max of:
1046 adapted to the shape of the undecided set (it is set to the max of:
1047 <target-size>, len(roots(undecided)), len(heads(undecided)
1047 <target-size>, len(roots(undecided)), len(heads(undecided)
1048
1048
1049 * devel.discovery.grow-sample.rate=1.05
1049 * devel.discovery.grow-sample.rate=1.05
1050
1050
1051 the rate at which the sample grow
1051 the rate at which the sample grow
1052
1052
1053 * devel.discovery.randomize=True
1053 * devel.discovery.randomize=True
1054
1054
1055 If andom sampling during discovery are deterministic. It is meant for
1055 If andom sampling during discovery are deterministic. It is meant for
1056 integration tests.
1056 integration tests.
1057
1057
1058 * devel.discovery.sample-size=200
1058 * devel.discovery.sample-size=200
1059
1059
1060 Control the initial size of the discovery sample
1060 Control the initial size of the discovery sample
1061
1061
1062 * devel.discovery.sample-size.initial=100
1062 * devel.discovery.sample-size.initial=100
1063
1063
1064 Control the initial size of the discovery for initial change
1064 Control the initial size of the discovery for initial change
1065 """
1065 """
1066 opts = pycompat.byteskwargs(opts)
1066 opts = pycompat.byteskwargs(opts)
1067 unfi = repo.unfiltered()
1067 unfi = repo.unfiltered()
1068
1068
1069 # setup potential extra filtering
1069 # setup potential extra filtering
1070 local_revs = opts[b"local_as_revs"]
1070 local_revs = opts[b"local_as_revs"]
1071 remote_revs = opts[b"remote_as_revs"]
1071 remote_revs = opts[b"remote_as_revs"]
1072
1072
1073 # make sure tests are repeatable
1073 # make sure tests are repeatable
1074 random.seed(int(opts[b'seed']))
1074 random.seed(int(opts[b'seed']))
1075
1075
1076 if not remote_revs:
1076 if not remote_revs:
1077
1077
1078 remoteurl, branches = urlutil.get_unique_pull_path(
1078 remoteurl, branches = urlutil.get_unique_pull_path(
1079 b'debugdiscovery', repo, ui, remoteurl
1079 b'debugdiscovery', repo, ui, remoteurl
1080 )
1080 )
1081 remote = hg.peer(repo, opts, remoteurl)
1081 remote = hg.peer(repo, opts, remoteurl)
1082 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1082 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1083 else:
1083 else:
1084 branches = (None, [])
1084 branches = (None, [])
1085 remote_filtered_revs = scmutil.revrange(
1085 remote_filtered_revs = scmutil.revrange(
1086 unfi, [b"not (::(%s))" % remote_revs]
1086 unfi, [b"not (::(%s))" % remote_revs]
1087 )
1087 )
1088 remote_filtered_revs = frozenset(remote_filtered_revs)
1088 remote_filtered_revs = frozenset(remote_filtered_revs)
1089
1089
1090 def remote_func(x):
1090 def remote_func(x):
1091 return remote_filtered_revs
1091 return remote_filtered_revs
1092
1092
1093 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1093 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1094
1094
1095 remote = repo.peer()
1095 remote = repo.peer()
1096 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1096 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1097
1097
1098 if local_revs:
1098 if local_revs:
1099 local_filtered_revs = scmutil.revrange(
1099 local_filtered_revs = scmutil.revrange(
1100 unfi, [b"not (::(%s))" % local_revs]
1100 unfi, [b"not (::(%s))" % local_revs]
1101 )
1101 )
1102 local_filtered_revs = frozenset(local_filtered_revs)
1102 local_filtered_revs = frozenset(local_filtered_revs)
1103
1103
1104 def local_func(x):
1104 def local_func(x):
1105 return local_filtered_revs
1105 return local_filtered_revs
1106
1106
1107 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1107 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1108 repo = repo.filtered(b'debug-discovery-local-filter')
1108 repo = repo.filtered(b'debug-discovery-local-filter')
1109
1109
1110 data = {}
1110 data = {}
1111 if opts.get(b'old'):
1111 if opts.get(b'old'):
1112
1112
1113 def doit(pushedrevs, remoteheads, remote=remote):
1113 def doit(pushedrevs, remoteheads, remote=remote):
1114 if not util.safehasattr(remote, b'branches'):
1114 if not util.safehasattr(remote, b'branches'):
1115 # enable in-client legacy support
1115 # enable in-client legacy support
1116 remote = localrepo.locallegacypeer(remote.local())
1116 remote = localrepo.locallegacypeer(remote.local())
1117 common, _in, hds = treediscovery.findcommonincoming(
1117 common, _in, hds = treediscovery.findcommonincoming(
1118 repo, remote, force=True, audit=data
1118 repo, remote, force=True, audit=data
1119 )
1119 )
1120 common = set(common)
1120 common = set(common)
1121 if not opts.get(b'nonheads'):
1121 if not opts.get(b'nonheads'):
1122 ui.writenoi18n(
1122 ui.writenoi18n(
1123 b"unpruned common: %s\n"
1123 b"unpruned common: %s\n"
1124 % b" ".join(sorted(short(n) for n in common))
1124 % b" ".join(sorted(short(n) for n in common))
1125 )
1125 )
1126
1126
1127 clnode = repo.changelog.node
1127 clnode = repo.changelog.node
1128 common = repo.revs(b'heads(::%ln)', common)
1128 common = repo.revs(b'heads(::%ln)', common)
1129 common = {clnode(r) for r in common}
1129 common = {clnode(r) for r in common}
1130 return common, hds
1130 return common, hds
1131
1131
1132 else:
1132 else:
1133
1133
1134 def doit(pushedrevs, remoteheads, remote=remote):
1134 def doit(pushedrevs, remoteheads, remote=remote):
1135 nodes = None
1135 nodes = None
1136 if pushedrevs:
1136 if pushedrevs:
1137 revs = scmutil.revrange(repo, pushedrevs)
1137 revs = scmutil.revrange(repo, pushedrevs)
1138 nodes = [repo[r].node() for r in revs]
1138 nodes = [repo[r].node() for r in revs]
1139 common, any, hds = setdiscovery.findcommonheads(
1139 common, any, hds = setdiscovery.findcommonheads(
1140 ui, repo, remote, ancestorsof=nodes, audit=data
1140 ui, repo, remote, ancestorsof=nodes, audit=data
1141 )
1141 )
1142 return common, hds
1142 return common, hds
1143
1143
1144 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1144 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1145 localrevs = opts[b'rev']
1145 localrevs = opts[b'rev']
1146
1146
1147 fm = ui.formatter(b'debugdiscovery', opts)
1147 fm = ui.formatter(b'debugdiscovery', opts)
1148 if fm.strict_format:
1148 if fm.strict_format:
1149
1149
1150 @contextlib.contextmanager
1150 @contextlib.contextmanager
1151 def may_capture_output():
1151 def may_capture_output():
1152 ui.pushbuffer()
1152 ui.pushbuffer()
1153 yield
1153 yield
1154 data[b'output'] = ui.popbuffer()
1154 data[b'output'] = ui.popbuffer()
1155
1155
1156 else:
1156 else:
1157 may_capture_output = util.nullcontextmanager
1157 may_capture_output = util.nullcontextmanager
1158 with may_capture_output():
1158 with may_capture_output():
1159 with util.timedcm('debug-discovery') as t:
1159 with util.timedcm('debug-discovery') as t:
1160 common, hds = doit(localrevs, remoterevs)
1160 common, hds = doit(localrevs, remoterevs)
1161
1161
1162 # compute all statistics
1162 # compute all statistics
1163 heads_common = set(common)
1163 heads_common = set(common)
1164 heads_remote = set(hds)
1164 heads_remote = set(hds)
1165 heads_local = set(repo.heads())
1165 heads_local = set(repo.heads())
1166 # note: they cannot be a local or remote head that is in common and not
1166 # note: they cannot be a local or remote head that is in common and not
1167 # itself a head of common.
1167 # itself a head of common.
1168 heads_common_local = heads_common & heads_local
1168 heads_common_local = heads_common & heads_local
1169 heads_common_remote = heads_common & heads_remote
1169 heads_common_remote = heads_common & heads_remote
1170 heads_common_both = heads_common & heads_remote & heads_local
1170 heads_common_both = heads_common & heads_remote & heads_local
1171
1171
1172 all = repo.revs(b'all()')
1172 all = repo.revs(b'all()')
1173 common = repo.revs(b'::%ln', common)
1173 common = repo.revs(b'::%ln', common)
1174 roots_common = repo.revs(b'roots(::%ld)', common)
1174 roots_common = repo.revs(b'roots(::%ld)', common)
1175 missing = repo.revs(b'not ::%ld', common)
1175 missing = repo.revs(b'not ::%ld', common)
1176 heads_missing = repo.revs(b'heads(%ld)', missing)
1176 heads_missing = repo.revs(b'heads(%ld)', missing)
1177 roots_missing = repo.revs(b'roots(%ld)', missing)
1177 roots_missing = repo.revs(b'roots(%ld)', missing)
1178 assert len(common) + len(missing) == len(all)
1178 assert len(common) + len(missing) == len(all)
1179
1179
1180 initial_undecided = repo.revs(
1180 initial_undecided = repo.revs(
1181 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1181 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1182 )
1182 )
1183 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1183 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1184 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1184 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1185 common_initial_undecided = initial_undecided & common
1185 common_initial_undecided = initial_undecided & common
1186 missing_initial_undecided = initial_undecided & missing
1186 missing_initial_undecided = initial_undecided & missing
1187
1187
1188 data[b'elapsed'] = t.elapsed
1188 data[b'elapsed'] = t.elapsed
1189 data[b'nb-common-heads'] = len(heads_common)
1189 data[b'nb-common-heads'] = len(heads_common)
1190 data[b'nb-common-heads-local'] = len(heads_common_local)
1190 data[b'nb-common-heads-local'] = len(heads_common_local)
1191 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1191 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1192 data[b'nb-common-heads-both'] = len(heads_common_both)
1192 data[b'nb-common-heads-both'] = len(heads_common_both)
1193 data[b'nb-common-roots'] = len(roots_common)
1193 data[b'nb-common-roots'] = len(roots_common)
1194 data[b'nb-head-local'] = len(heads_local)
1194 data[b'nb-head-local'] = len(heads_local)
1195 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1195 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1196 data[b'nb-head-remote'] = len(heads_remote)
1196 data[b'nb-head-remote'] = len(heads_remote)
1197 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1197 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1198 heads_common_remote
1198 heads_common_remote
1199 )
1199 )
1200 data[b'nb-revs'] = len(all)
1200 data[b'nb-revs'] = len(all)
1201 data[b'nb-revs-common'] = len(common)
1201 data[b'nb-revs-common'] = len(common)
1202 data[b'nb-revs-missing'] = len(missing)
1202 data[b'nb-revs-missing'] = len(missing)
1203 data[b'nb-missing-heads'] = len(heads_missing)
1203 data[b'nb-missing-heads'] = len(heads_missing)
1204 data[b'nb-missing-roots'] = len(roots_missing)
1204 data[b'nb-missing-roots'] = len(roots_missing)
1205 data[b'nb-ini_und'] = len(initial_undecided)
1205 data[b'nb-ini_und'] = len(initial_undecided)
1206 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1206 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1207 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1207 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1208 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1208 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1209 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1209 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1210
1210
1211 fm.startitem()
1211 fm.startitem()
1212 fm.data(**pycompat.strkwargs(data))
1212 fm.data(**pycompat.strkwargs(data))
1213 # display discovery summary
1213 # display discovery summary
1214 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1214 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1215 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1215 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1216 fm.plain(b"heads summary:\n")
1216 fm.plain(b"heads summary:\n")
1217 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1217 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1218 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1218 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1219 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1219 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1220 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1220 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1221 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1221 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1222 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1222 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1223 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1223 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1224 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1224 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1225 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1225 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1226 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1226 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1227 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1227 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1228 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1228 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1229 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1229 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1230 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1230 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1231 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1231 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1232 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1232 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1233 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1233 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1234 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1234 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1235 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1235 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1236 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1236 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1237 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1237 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1238 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1238 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1239
1239
1240 if ui.verbose:
1240 if ui.verbose:
1241 fm.plain(
1241 fm.plain(
1242 b"common heads: %s\n"
1242 b"common heads: %s\n"
1243 % b" ".join(sorted(short(n) for n in heads_common))
1243 % b" ".join(sorted(short(n) for n in heads_common))
1244 )
1244 )
1245 fm.end()
1245 fm.end()
1246
1246
1247
1247
# Buffer size (4 KiB) used to stream data in debugdownload below.
_chunksize = 4 << 10
1249
1249
1250
1250
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through Mercurial's URL handling (so proxy and
    authentication configuration apply) and streamed in ``_chunksize``
    chunks, either to the ui (default) or to the file named by --output.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Release the response handle as well as the output file so the
        # underlying connection / file descriptor is not leaked.
        # NOTE(review): assumes urlmod.open returns a closeable file-like
        # object — confirm against mercurial.url.open.
        fh.close()
        if output:
            dest.close()
1273
1273
1274
1274
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # One formatter item per loaded extension, sorted by extension name.
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # Frozen (oxidized) builds have no __file__; report the binary.
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # Default verbosity: annotate the name with a compatibility
            # marker when the extension was not tested with this hg version.
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1336
1336
1337
1337
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # The expression goes through these stages in order; each stage's tree
    # can be dumped with --show-stage.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, printing the tree after each requested stage.
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect the candidate file names to test the matcher against.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # Include working-directory files (unknown and ignored too).
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # Print the candidates selected by the fileset expression.
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1433
1433
1434
1434
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that hasn't been written yet
    (as of 5.9rc0).

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report is exclusive with both --from-report and --dry-run.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # Guard: the corruption fixed here only exists in revlog v1 stores.
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # Delegate the actual detection/repair work to the rewrite module,
    # forwarding the command-line options directly.
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1507
1507
1508
1508
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: the longest variant name (at least the header's width).
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Pad after the name so the value columns line up.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # Plain output: pass strings through, render booleans as yes/no.
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # Structured formatters (json, template...) keep the raw values.
        formatvalue = pycompat.identity

    # Header row.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so mismatches between repo, config and default can be
        # color-highlighted by the ui.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # The config and default columns only appear with --verbose.
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1579
1579
1580
1580
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(value):
        # Human-readable form of a boolean probe result.  Replaces the
        # fragile `value and b'yes' or b'no'` idiom (which silently breaks
        # if the truthy alternative is ever falsy) with a real conditional.
        return b'yes' if value else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # Probe case sensitivity with a throwaway temp file; leave the
        # '(unknown)' default if the path is not writable.
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1603
1603
1604
1604
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # Assemble the wire-protocol arguments from the command line.
    kwargs = {}
    if common:
        kwargs['common'] = [bin(node) for node in common]
    if head:
        kwargs['heads'] = [bin(node) for node in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # Map the user-facing compression name to the on-disk bundle header.
    requested = opts.get(b'type', b'bzip2').lower()
    known_types = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = known_types.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1651
1651
1652
1652
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No file arguments: just dump the combined ignore matcher.
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                # the file itself matches an ignore rule
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # otherwise check whether an ancestor directory is ignored
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1701
1701
1702
1702
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # size the node columns from the first revision (default width 12)
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1742
1742
1743
1743
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in r:
        p1, p2 = r.parents(r.node(rev))
        # always emit the first-parent edge; the second only when it exists
        ui.write(b"\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write(b"}\n")
1762
1762
1763
1763
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force instantiation of the (possibly native) index implementation
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        # only the C/Rust index exposes a stats() method
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1773
1773
1774
1774
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # number of hard problems found so far; warnings do not count
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Locate the Python standard library; under an oxidized build the
    # modules live inside the executable itself, so report that path.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    # collect the TLS protocol versions (and SNI support) available to us
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version: split off any "+extra" local-build suffix
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # verify the compiled extension modules actually import
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'available' if util._re2 else b'missing'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    # an unusable non-default editor is a real problem; a missing default
    # 'vi' is only a warning
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2073
2073
2074
2074
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # render one digit per queried node, in query order
    ui.write(b"%s\n" % b"".join(b"1" if f else b"0" for f in flags))
2088
2088
2089
2089
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # plain alias: delegate entirely to the modern command
    debugnamecomplete(ui, repo, *args)
2094
2094
2095
2095
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: unconditionally delete the lock file(s) and stop
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: take the lock(s) and hold them until the
    # user confirms the prompt (or the command is interrupted)
    acquired = []
    try:
        if opts.get('set_wlock'):
            try:
                acquired.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                acquired.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if acquired:
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*acquired)

    # default mode: report on each lock; return the number currently held
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Attempting to take the lock ourselves reaps stale locks as a side
        # effect, which makes the report more accurate.
        try:
            testlock = method(False)
        except error.LockHeld:
            testlock = None

        if testlock:
            # lock was free; we briefly owned it, so release it again
            testlock.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock disappeared between the probe and
                # the stat: treat it as free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2207
2207
2208
2208
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Not every revlog implementation exposes a fulltext cache.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            getcache().clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            manifestlog = repo.manifestlog
            store = manifestlog.getstorage(b'')
            for node in add:
                try:
                    manifest = manifestlog[store.lookup(node)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # no action flag: report the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return

    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # peek() inspects without promoting the entry in the LRU order
        data = cache.peek(nodeid)
        size = len(data)
        totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2282
2282
2283
2283
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable template; each nested formatter section
        # below (commits/files/extras) is rendered by one clause here
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits" section: the two heads involved in the merge
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: one item per file record; the fields emitted depend
    # on the record type (content merge vs path conflict)
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # content-merge record: positional fields of the v1 format
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # path-conflict record
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # top-level "extras" section: extras for files absent from the state
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2391
2391
2392
2392
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect every known name except branches; branches get special
    # treatment because historically only open branches were listed.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # With no arguments, complete against the empty prefix (everything).
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2415
2415
2416
2416
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # every mode works on the unfiltered changelog
    cl = repo.unfiltered().changelog
    if opts['dump_new']:
        index = cl.index
        if util.safehasattr(index, "nodemap_data_all"):
            # a native (e.g. Rust) index can serialize its own nodemap
            data = index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(index)
        ui.write(data)
    elif opts['dump_disk']:
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, data = persisted
            ui.write(data[:])
    elif opts['check']:
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, data = persisted
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, data = persisted
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2478
2478
2479
2479
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Turn a full hexadecimal node id into its binary form, aborting
        # on anything that is not a full-length id.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # deletion mode: --delete takes marker indices
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # parents can only be recorded for changesets we know
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                # release() after close() commits; without close() it aborts
                tr.release()
        finally:
            l.release()
    else:
        # display mode
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # indices are global, so iterate everything but filter display
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2629
2629
2630
2630
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    bopts = pycompat.byteskwargs(opts)
    # default=None means "working directory" when no --rev is given
    ctx = scmutil.revsingle(repo, bopts.get(b'rev'), default=None)
    for destination, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, destination))
2643
2643
2644
2644
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Bug fix: this function was previously also named `debugp1copies`
    # (copy-paste from the p1 variant), which shadowed the real
    # debugp1copies at module level. The registered command name
    # (b'debugp2copies') was never affected, only the module attribute.

    opts = pycompat.byteskwargs(opts)
    # default=None means "working directory" when no --rev is given
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2657
2657
2658
2658
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Resolve the spec to a repo-relative path; specs pointing outside
        # the repository complete to nothing.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # the dirstate stores paths with '/' separators regardless of OS
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        for f, entry in pycompat.iteritems(dirstate):
            if not f.startswith(spec) or entry.state not in acceptable:
                continue
            if fixpaths:
                f = f.replace(b'/', pycompat.ossep)
            if fullpaths:
                files.add(f)
                continue
            sep = f.find(pycompat.ossep, speclen)
            if sep >= 0:
                # truncate at the next path segment boundary
                dirs.add(f[:sep])
            else:
                files.add(f)
        return files, dirs

    # map filtering options onto dirstate state characters
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2727
2727
2728
2728
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    # the match is built against the source revision
    matcher = scmutil.match(source_ctx, pats, opts)
    copymap = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for destination, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2742
2742
2743
2743
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = _(b'yes') if peer.local() is not None else _(b'no')
        pushable = _(b'yes') if peer.canpush() else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % islocal)
        ui.write(_(b'pushable: %s\n') % pushable)
    finally:
        # always close the peer, even if introspection failed
        peer.close()
2767
2767
2768
2768
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)

    # --tool wins over everything else; surface it (verbose only) and
    # force it through the ui.forcemerge override.
    config_overrides = {}
    if opts[b'tool']:
        config_overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(config_overrides, b'debugmergepatterns'):
        # Report the other early-decision knobs (verbose only).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        revctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(revctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in revctx.walk(matcher):
            fctx = revctx[path]
            # Without --debug, suppress the picker's chatter (including
            # error-channel output); with --debug, let it all through.
            if ui.debugflag:
                quiet_cm = util.nullcontextmanager()
            else:
                quiet_cm = ui.silent(error=True)
            with quiet_cm:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2853
2853
2854
2854
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace,
            # escaped for safe display, in sorted key order.
            pairs = sorted(pycompat.iteritems(peer.listkeys(namespace)))
            for key, value in pairs:
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
            return

        # Update mode: attempt the compare-and-set and report the result.
        key, old, new = keyinfo
        args = {
            b'namespace': namespace,
            b'key': key,
            b'old': old,
            b'new': new,
        }
        with peer.commandexecutor() as executor:
            outcome = executor.callcommand(b'pushkey', args).result()

        ui.status(pycompat.bytestr(outcome) + b'\n')
        # Shell convention: exit 0 on success, non-zero on failure.
        return not outcome
    finally:
        peer.close()
2890
2890
2891
2891
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display and compare the pvec encodings of two revisions

    Prints each revision's pvec, their depths, and the delta/hamming/
    distance metrics together with the inferred relation:
    ``=`` (equal), ``>``/``<`` (ancestor/descendant), or ``|``
    (unrelated).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: previously `rel` was only bound inside the
        # branches above, so a pair satisfying none of the relations
        # would have raised NameError on the final write below.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2918
2918
2919
2919
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            # Files the manifest knows but the dirstate does not...
            manifest_only = in_manifest - in_dirstate
            # ...and dirstate-only files that are not pending adds.
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f for f in dirstate_only if not dirstate.get_entry(f).added
            }
            changedfiles = manifest_only | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2967
2967
2968
2968
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Normalize keyword args to bytes keys before delegating to repair.
    byteopts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, byteopts.get(b"only_data"))
2985
2985
2986
2986
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a falsy
        # value when the file was not copied/renamed in this revision.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if renamed:
            source, sourcenode = renamed[0], renamed[1]
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, source, hex(sourcenode))
            )
        else:
            ui.write(_(b"%s not renamed\n") % relpath)
3006
3006
3007
3007
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3013
3013
3014
3014
3015 @command(
3015 @command(
3016 b'debugrevlog',
3016 b'debugrevlog',
3017 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3017 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3018 _(b'-c|-m|FILE'),
3018 _(b'-c|-m|FILE'),
3019 optionalrepo=True,
3019 optionalrepo=True,
3020 )
3020 )
3021 def debugrevlog(ui, repo, file_=None, **opts):
3021 def debugrevlog(ui, repo, file_=None, **opts):
3022 """show data and statistics about a revlog"""
3022 """show data and statistics about a revlog"""
3023 opts = pycompat.byteskwargs(opts)
3023 opts = pycompat.byteskwargs(opts)
3024 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3024 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3025
3025
3026 if opts.get(b"dump"):
3026 if opts.get(b"dump"):
3027 numrevs = len(r)
3027 numrevs = len(r)
3028 ui.write(
3028 ui.write(
3029 (
3029 (
3030 b"# rev p1rev p2rev start end deltastart base p1 p2"
3030 b"# rev p1rev p2rev start end deltastart base p1 p2"
3031 b" rawsize totalsize compression heads chainlen\n"
3031 b" rawsize totalsize compression heads chainlen\n"
3032 )
3032 )
3033 )
3033 )
3034 ts = 0
3034 ts = 0
3035 heads = set()
3035 heads = set()
3036
3036
3037 for rev in pycompat.xrange(numrevs):
3037 for rev in pycompat.xrange(numrevs):
3038 dbase = r.deltaparent(rev)
3038 dbase = r.deltaparent(rev)
3039 if dbase == -1:
3039 if dbase == -1:
3040 dbase = rev
3040 dbase = rev
3041 cbase = r.chainbase(rev)
3041 cbase = r.chainbase(rev)
3042 clen = r.chainlen(rev)
3042 clen = r.chainlen(rev)
3043 p1, p2 = r.parentrevs(rev)
3043 p1, p2 = r.parentrevs(rev)
3044 rs = r.rawsize(rev)
3044 rs = r.rawsize(rev)
3045 ts = ts + rs
3045 ts = ts + rs
3046 heads -= set(r.parentrevs(rev))
3046 heads -= set(r.parentrevs(rev))
3047 heads.add(rev)
3047 heads.add(rev)
3048 try:
3048 try:
3049 compression = ts / r.end(rev)
3049 compression = ts / r.end(rev)
3050 except ZeroDivisionError:
3050 except ZeroDivisionError:
3051 compression = 0
3051 compression = 0
3052 ui.write(
3052 ui.write(
3053 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3053 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3054 b"%11d %5d %8d\n"
3054 b"%11d %5d %8d\n"
3055 % (
3055 % (
3056 rev,
3056 rev,
3057 p1,
3057 p1,
3058 p2,
3058 p2,
3059 r.start(rev),
3059 r.start(rev),
3060 r.end(rev),
3060 r.end(rev),
3061 r.start(dbase),
3061 r.start(dbase),
3062 r.start(cbase),
3062 r.start(cbase),
3063 r.start(p1),
3063 r.start(p1),
3064 r.start(p2),
3064 r.start(p2),
3065 rs,
3065 rs,
3066 ts,
3066 ts,
3067 compression,
3067 compression,
3068 len(heads),
3068 len(heads),
3069 clen,
3069 clen,
3070 )
3070 )
3071 )
3071 )
3072 return 0
3072 return 0
3073
3073
3074 format = r._format_version
3074 format = r._format_version
3075 v = r._format_flags
3075 v = r._format_flags
3076 flags = []
3076 flags = []
3077 gdelta = False
3077 gdelta = False
3078 if v & revlog.FLAG_INLINE_DATA:
3078 if v & revlog.FLAG_INLINE_DATA:
3079 flags.append(b'inline')
3079 flags.append(b'inline')
3080 if v & revlog.FLAG_GENERALDELTA:
3080 if v & revlog.FLAG_GENERALDELTA:
3081 gdelta = True
3081 gdelta = True
3082 flags.append(b'generaldelta')
3082 flags.append(b'generaldelta')
3083 if not flags:
3083 if not flags:
3084 flags = [b'(none)']
3084 flags = [b'(none)']
3085
3085
3086 ### tracks merge vs single parent
3086 ### tracks merge vs single parent
3087 nummerges = 0
3087 nummerges = 0
3088
3088
3089 ### tracks ways the "delta" are build
3089 ### tracks ways the "delta" are build
3090 # nodelta
3090 # nodelta
3091 numempty = 0
3091 numempty = 0
3092 numemptytext = 0
3092 numemptytext = 0
3093 numemptydelta = 0
3093 numemptydelta = 0
3094 # full file content
3094 # full file content
3095 numfull = 0
3095 numfull = 0
3096 # intermediate snapshot against a prior snapshot
3096 # intermediate snapshot against a prior snapshot
3097 numsemi = 0
3097 numsemi = 0
3098 # snapshot count per depth
3098 # snapshot count per depth
3099 numsnapdepth = collections.defaultdict(lambda: 0)
3099 numsnapdepth = collections.defaultdict(lambda: 0)
3100 # delta against previous revision
3100 # delta against previous revision
3101 numprev = 0
3101 numprev = 0
3102 # delta against first or second parent (not prev)
3102 # delta against first or second parent (not prev)
3103 nump1 = 0
3103 nump1 = 0
3104 nump2 = 0
3104 nump2 = 0
3105 # delta against neither prev nor parents
3105 # delta against neither prev nor parents
3106 numother = 0
3106 numother = 0
3107 # delta against prev that are also first or second parent
3107 # delta against prev that are also first or second parent
3108 # (details of `numprev`)
3108 # (details of `numprev`)
3109 nump1prev = 0
3109 nump1prev = 0
3110 nump2prev = 0
3110 nump2prev = 0
3111
3111
3112 # data about delta chain of each revs
3112 # data about delta chain of each revs
3113 chainlengths = []
3113 chainlengths = []
3114 chainbases = []
3114 chainbases = []
3115 chainspans = []
3115 chainspans = []
3116
3116
3117 # data about each revision
3117 # data about each revision
3118 datasize = [None, 0, 0]
3118 datasize = [None, 0, 0]
3119 fullsize = [None, 0, 0]
3119 fullsize = [None, 0, 0]
3120 semisize = [None, 0, 0]
3120 semisize = [None, 0, 0]
3121 # snapshot count per depth
3121 # snapshot count per depth
3122 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3122 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3123 deltasize = [None, 0, 0]
3123 deltasize = [None, 0, 0]
3124 chunktypecounts = {}
3124 chunktypecounts = {}
3125 chunktypesizes = {}
3125 chunktypesizes = {}
3126
3126
3127 def addsize(size, l):
3127 def addsize(size, l):
3128 if l[0] is None or size < l[0]:
3128 if l[0] is None or size < l[0]:
3129 l[0] = size
3129 l[0] = size
3130 if size > l[1]:
3130 if size > l[1]:
3131 l[1] = size
3131 l[1] = size
3132 l[2] += size
3132 l[2] += size
3133
3133
3134 numrevs = len(r)
3134 numrevs = len(r)
3135 for rev in pycompat.xrange(numrevs):
3135 for rev in pycompat.xrange(numrevs):
3136 p1, p2 = r.parentrevs(rev)
3136 p1, p2 = r.parentrevs(rev)
3137 delta = r.deltaparent(rev)
3137 delta = r.deltaparent(rev)
3138 if format > 0:
3138 if format > 0:
3139 addsize(r.rawsize(rev), datasize)
3139 addsize(r.rawsize(rev), datasize)
3140 if p2 != nullrev:
3140 if p2 != nullrev:
3141 nummerges += 1
3141 nummerges += 1
3142 size = r.length(rev)
3142 size = r.length(rev)
3143 if delta == nullrev:
3143 if delta == nullrev:
3144 chainlengths.append(0)
3144 chainlengths.append(0)
3145 chainbases.append(r.start(rev))
3145 chainbases.append(r.start(rev))
3146 chainspans.append(size)
3146 chainspans.append(size)
3147 if size == 0:
3147 if size == 0:
3148 numempty += 1
3148 numempty += 1
3149 numemptytext += 1
3149 numemptytext += 1
3150 else:
3150 else:
3151 numfull += 1
3151 numfull += 1
3152 numsnapdepth[0] += 1
3152 numsnapdepth[0] += 1
3153 addsize(size, fullsize)
3153 addsize(size, fullsize)
3154 addsize(size, snapsizedepth[0])
3154 addsize(size, snapsizedepth[0])
3155 else:
3155 else:
3156 chainlengths.append(chainlengths[delta] + 1)
3156 chainlengths.append(chainlengths[delta] + 1)
3157 baseaddr = chainbases[delta]
3157 baseaddr = chainbases[delta]
3158 revaddr = r.start(rev)
3158 revaddr = r.start(rev)
3159 chainbases.append(baseaddr)
3159 chainbases.append(baseaddr)
3160 chainspans.append((revaddr - baseaddr) + size)
3160 chainspans.append((revaddr - baseaddr) + size)
3161 if size == 0:
3161 if size == 0:
3162 numempty += 1
3162 numempty += 1
3163 numemptydelta += 1
3163 numemptydelta += 1
3164 elif r.issnapshot(rev):
3164 elif r.issnapshot(rev):
3165 addsize(size, semisize)
3165 addsize(size, semisize)
3166 numsemi += 1
3166 numsemi += 1
3167 depth = r.snapshotdepth(rev)
3167 depth = r.snapshotdepth(rev)
3168 numsnapdepth[depth] += 1
3168 numsnapdepth[depth] += 1
3169 addsize(size, snapsizedepth[depth])
3169 addsize(size, snapsizedepth[depth])
3170 else:
3170 else:
3171 addsize(size, deltasize)
3171 addsize(size, deltasize)
3172 if delta == rev - 1:
3172 if delta == rev - 1:
3173 numprev += 1
3173 numprev += 1
3174 if delta == p1:
3174 if delta == p1:
3175 nump1prev += 1
3175 nump1prev += 1
3176 elif delta == p2:
3176 elif delta == p2:
3177 nump2prev += 1
3177 nump2prev += 1
3178 elif delta == p1:
3178 elif delta == p1:
3179 nump1 += 1
3179 nump1 += 1
3180 elif delta == p2:
3180 elif delta == p2:
3181 nump2 += 1
3181 nump2 += 1
3182 elif delta != nullrev:
3182 elif delta != nullrev:
3183 numother += 1
3183 numother += 1
3184
3184
3185 # Obtain data on the raw chunks in the revlog.
3185 # Obtain data on the raw chunks in the revlog.
3186 if util.safehasattr(r, b'_getsegmentforrevs'):
3186 if util.safehasattr(r, b'_getsegmentforrevs'):
3187 segment = r._getsegmentforrevs(rev, rev)[1]
3187 segment = r._getsegmentforrevs(rev, rev)[1]
3188 else:
3188 else:
3189 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3189 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3190 if segment:
3190 if segment:
3191 chunktype = bytes(segment[0:1])
3191 chunktype = bytes(segment[0:1])
3192 else:
3192 else:
3193 chunktype = b'empty'
3193 chunktype = b'empty'
3194
3194
3195 if chunktype not in chunktypecounts:
3195 if chunktype not in chunktypecounts:
3196 chunktypecounts[chunktype] = 0
3196 chunktypecounts[chunktype] = 0
3197 chunktypesizes[chunktype] = 0
3197 chunktypesizes[chunktype] = 0
3198
3198
3199 chunktypecounts[chunktype] += 1
3199 chunktypecounts[chunktype] += 1
3200 chunktypesizes[chunktype] += size
3200 chunktypesizes[chunktype] += size
3201
3201
3202 # Adjust size min value for empty cases
3202 # Adjust size min value for empty cases
3203 for size in (datasize, fullsize, semisize, deltasize):
3203 for size in (datasize, fullsize, semisize, deltasize):
3204 if size[0] is None:
3204 if size[0] is None:
3205 size[0] = 0
3205 size[0] = 0
3206
3206
3207 numdeltas = numrevs - numfull - numempty - numsemi
3207 numdeltas = numrevs - numfull - numempty - numsemi
3208 numoprev = numprev - nump1prev - nump2prev
3208 numoprev = numprev - nump1prev - nump2prev
3209 totalrawsize = datasize[2]
3209 totalrawsize = datasize[2]
3210 datasize[2] /= numrevs
3210 datasize[2] /= numrevs
3211 fulltotal = fullsize[2]
3211 fulltotal = fullsize[2]
3212 if numfull == 0:
3212 if numfull == 0:
3213 fullsize[2] = 0
3213 fullsize[2] = 0
3214 else:
3214 else:
3215 fullsize[2] /= numfull
3215 fullsize[2] /= numfull
3216 semitotal = semisize[2]
3216 semitotal = semisize[2]
3217 snaptotal = {}
3217 snaptotal = {}
3218 if numsemi > 0:
3218 if numsemi > 0:
3219 semisize[2] /= numsemi
3219 semisize[2] /= numsemi
3220 for depth in snapsizedepth:
3220 for depth in snapsizedepth:
3221 snaptotal[depth] = snapsizedepth[depth][2]
3221 snaptotal[depth] = snapsizedepth[depth][2]
3222 snapsizedepth[depth][2] /= numsnapdepth[depth]
3222 snapsizedepth[depth][2] /= numsnapdepth[depth]
3223
3223
3224 deltatotal = deltasize[2]
3224 deltatotal = deltasize[2]
3225 if numdeltas > 0:
3225 if numdeltas > 0:
3226 deltasize[2] /= numdeltas
3226 deltasize[2] /= numdeltas
3227 totalsize = fulltotal + semitotal + deltatotal
3227 totalsize = fulltotal + semitotal + deltatotal
3228 avgchainlen = sum(chainlengths) / numrevs
3228 avgchainlen = sum(chainlengths) / numrevs
3229 maxchainlen = max(chainlengths)
3229 maxchainlen = max(chainlengths)
3230 maxchainspan = max(chainspans)
3230 maxchainspan = max(chainspans)
3231 compratio = 1
3231 compratio = 1
3232 if totalsize:
3232 if totalsize:
3233 compratio = totalrawsize / totalsize
3233 compratio = totalrawsize / totalsize
3234
3234
3235 basedfmtstr = b'%%%dd\n'
3235 basedfmtstr = b'%%%dd\n'
3236 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3236 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3237
3237
3238 def dfmtstr(max):
3238 def dfmtstr(max):
3239 return basedfmtstr % len(str(max))
3239 return basedfmtstr % len(str(max))
3240
3240
3241 def pcfmtstr(max, padding=0):
3241 def pcfmtstr(max, padding=0):
3242 return basepcfmtstr % (len(str(max)), b' ' * padding)
3242 return basepcfmtstr % (len(str(max)), b' ' * padding)
3243
3243
3244 def pcfmt(value, total):
3244 def pcfmt(value, total):
3245 if total:
3245 if total:
3246 return (value, 100 * float(value) / total)
3246 return (value, 100 * float(value) / total)
3247 else:
3247 else:
3248 return value, 100.0
3248 return value, 100.0
3249
3249
3250 ui.writenoi18n(b'format : %d\n' % format)
3250 ui.writenoi18n(b'format : %d\n' % format)
3251 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3251 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3252
3252
3253 ui.write(b'\n')
3253 ui.write(b'\n')
3254 fmt = pcfmtstr(totalsize)
3254 fmt = pcfmtstr(totalsize)
3255 fmt2 = dfmtstr(totalsize)
3255 fmt2 = dfmtstr(totalsize)
3256 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3256 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3257 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3257 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3258 ui.writenoi18n(
3258 ui.writenoi18n(
3259 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3259 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3260 )
3260 )
3261 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3261 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3262 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3262 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3263 ui.writenoi18n(
3263 ui.writenoi18n(
3264 b' text : '
3264 b' text : '
3265 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3265 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3266 )
3266 )
3267 ui.writenoi18n(
3267 ui.writenoi18n(
3268 b' delta : '
3268 b' delta : '
3269 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3269 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3270 )
3270 )
3271 ui.writenoi18n(
3271 ui.writenoi18n(
3272 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3272 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3273 )
3273 )
3274 for depth in sorted(numsnapdepth):
3274 for depth in sorted(numsnapdepth):
3275 ui.write(
3275 ui.write(
3276 (b' lvl-%-3d : ' % depth)
3276 (b' lvl-%-3d : ' % depth)
3277 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3277 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3278 )
3278 )
3279 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3279 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3280 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3280 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3281 ui.writenoi18n(
3281 ui.writenoi18n(
3282 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3282 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3283 )
3283 )
3284 for depth in sorted(numsnapdepth):
3284 for depth in sorted(numsnapdepth):
3285 ui.write(
3285 ui.write(
3286 (b' lvl-%-3d : ' % depth)
3286 (b' lvl-%-3d : ' % depth)
3287 + fmt % pcfmt(snaptotal[depth], totalsize)
3287 + fmt % pcfmt(snaptotal[depth], totalsize)
3288 )
3288 )
3289 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3289 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3290
3290
3291 def fmtchunktype(chunktype):
3291 def fmtchunktype(chunktype):
3292 if chunktype == b'empty':
3292 if chunktype == b'empty':
3293 return b' %s : ' % chunktype
3293 return b' %s : ' % chunktype
3294 elif chunktype in pycompat.bytestr(string.ascii_letters):
3294 elif chunktype in pycompat.bytestr(string.ascii_letters):
3295 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3295 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3296 else:
3296 else:
3297 return b' 0x%s : ' % hex(chunktype)
3297 return b' 0x%s : ' % hex(chunktype)
3298
3298
3299 ui.write(b'\n')
3299 ui.write(b'\n')
3300 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3300 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3301 for chunktype in sorted(chunktypecounts):
3301 for chunktype in sorted(chunktypecounts):
3302 ui.write(fmtchunktype(chunktype))
3302 ui.write(fmtchunktype(chunktype))
3303 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3303 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3304 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3304 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3305 for chunktype in sorted(chunktypecounts):
3305 for chunktype in sorted(chunktypecounts):
3306 ui.write(fmtchunktype(chunktype))
3306 ui.write(fmtchunktype(chunktype))
3307 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3307 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3308
3308
3309 ui.write(b'\n')
3309 ui.write(b'\n')
3310 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3310 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3311 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3311 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3312 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3312 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3313 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3313 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3314 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3314 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3315
3315
3316 if format > 0:
3316 if format > 0:
3317 ui.write(b'\n')
3317 ui.write(b'\n')
3318 ui.writenoi18n(
3318 ui.writenoi18n(
3319 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3319 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3320 % tuple(datasize)
3320 % tuple(datasize)
3321 )
3321 )
3322 ui.writenoi18n(
3322 ui.writenoi18n(
3323 b'full revision size (min/max/avg) : %d / %d / %d\n'
3323 b'full revision size (min/max/avg) : %d / %d / %d\n'
3324 % tuple(fullsize)
3324 % tuple(fullsize)
3325 )
3325 )
3326 ui.writenoi18n(
3326 ui.writenoi18n(
3327 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3327 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3328 % tuple(semisize)
3328 % tuple(semisize)
3329 )
3329 )
3330 for depth in sorted(snapsizedepth):
3330 for depth in sorted(snapsizedepth):
3331 if depth == 0:
3331 if depth == 0:
3332 continue
3332 continue
3333 ui.writenoi18n(
3333 ui.writenoi18n(
3334 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3334 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3335 % ((depth,) + tuple(snapsizedepth[depth]))
3335 % ((depth,) + tuple(snapsizedepth[depth]))
3336 )
3336 )
3337 ui.writenoi18n(
3337 ui.writenoi18n(
3338 b'delta size (min/max/avg) : %d / %d / %d\n'
3338 b'delta size (min/max/avg) : %d / %d / %d\n'
3339 % tuple(deltasize)
3339 % tuple(deltasize)
3340 )
3340 )
3341
3341
3342 if numdeltas > 0:
3342 if numdeltas > 0:
3343 ui.write(b'\n')
3343 ui.write(b'\n')
3344 fmt = pcfmtstr(numdeltas)
3344 fmt = pcfmtstr(numdeltas)
3345 fmt2 = pcfmtstr(numdeltas, 4)
3345 fmt2 = pcfmtstr(numdeltas, 4)
3346 ui.writenoi18n(
3346 ui.writenoi18n(
3347 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3347 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3348 )
3348 )
3349 if numprev > 0:
3349 if numprev > 0:
3350 ui.writenoi18n(
3350 ui.writenoi18n(
3351 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3351 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3352 )
3352 )
3353 ui.writenoi18n(
3353 ui.writenoi18n(
3354 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3354 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3355 )
3355 )
3356 ui.writenoi18n(
3356 ui.writenoi18n(
3357 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3357 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3358 )
3358 )
3359 if gdelta:
3359 if gdelta:
3360 ui.writenoi18n(
3360 ui.writenoi18n(
3361 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3361 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3362 )
3362 )
3363 ui.writenoi18n(
3363 ui.writenoi18n(
3364 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3364 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3365 )
3365 )
3366 ui.writenoi18n(
3366 ui.writenoi18n(
3367 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3367 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3368 )
3368 )
3369
3369
3370
3370
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full node ids with --debug, abbreviated ones otherwise.
    if ui.debugflag:
        node2str = hex
    else:
        node2str = short

    # The revlog may be empty, so start from a sane column width and take
    # the real width from the first entry, if there is one.
    id_width = 12
    for probe in r:
        id_width = len(node2str(r.node(probe)))
        break

    # Emit the column header matching the requested format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(id_width), b"p1".ljust(id_width))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(id_width), b"p1".ljust(id_width))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(id_width)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(id_width)
            )

    for rev in r:
        node = r.node(rev)
        if format == 0:
            try:
                ps = r.parents(node)
            except Exception:
                # broken entry: fall back to null parents so we can keep
                # dumping the rest of the index
                ps = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        rev,
                        r.start(rev),
                        r.length(rev),
                        r.linkrev(rev),
                        node2str(node),
                        node2str(ps[0]),
                        node2str(ps[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        rev,
                        r.linkrev(rev),
                        node2str(node),
                        node2str(ps[0]),
                        node2str(ps[1]),
                    )
                )
        elif format == 1:
            prevs = r.parentrevs(rev)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        r.flags(rev),
                        r.start(rev),
                        r.length(rev),
                        r.rawsize(rev),
                        r.linkrev(rev),
                        prevs[0],
                        prevs[1],
                        node2str(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        r.flags(rev),
                        r.rawsize(rev),
                        r.linkrev(rev),
                        prevs[0],
                        prevs[1],
                        node2str(node),
                    )
                )
3484
3484
3485
3485
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Successive transformations applied to the parsed tree, in pipeline
    # order; each stage has a name so it can be selected with -p.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {name for name, _func in stages}

    # Stages whose tree is always printed, and stages printed only when
    # the tree differs from the last one shown.
    show_always = set()
    show_changed = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        show_always.add(b'parsed')
        show_changed.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            show_always.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        show_always.update(stagenames)
    else:
        for name in opts[b'show_stage']:
            if name not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % name)
        show_always.update(opts[b'show_stage'])

    tree_by_stage = {}
    last_printed = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for name, transform in stages:
        tree_by_stage[name] = tree = transform(tree)
        if name in show_always or (
            name in show_changed and tree != last_printed
        ):
            if opts[b'show_stage'] or name != b'parsed':
                ui.write(b"* %s:\n" % name)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            last_printed = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision lists.
        arevs = revset.makematcher(tree_by_stage[b'analyzed'])(repo)
        brevs = revset.makematcher(tree_by_stage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        matcher = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in matcher.get_opcodes():
            if tag in ('delete', 'replace'):
                for rev in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % rev, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for rev in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % rev, label=b'diff.inserted')
            if tag == 'equal':
                for rev in arevs[alo:ahi]:
                    ui.write(b' %d\n' % rev)
        return 1

    matchfn = revset.makematcher(tree)
    revs = matchfn(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for rev in revs:
        ui.write(b"%d\n" % rev)
3617
3617
3618
3618
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Guard clauses: only the ssh-over-stdio mode is implemented, and the
    # two logging destinations are mutually exclusive.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3667
3667
3668
3668
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both arguments to binary nodes; rev2 falls back to the null
    # revision when omitted.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Hold the working-copy lock while rewriting the dirstate parents.
    with repo.wlock():
        repo.setparents(node1, node2)
3696
3696
3697
3697
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision, not
    # a file name, so shuffle the arguments accordingly.
    # NOTE(review): the command name passed to CommandError/openstorage is
    # b'debugdata' — presumably a copy-paste from the debugdata command;
    # confirm before changing, as error output depends on it.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    storage = getattr(storage, '_revlog', storage)
    try:
        sidedata = storage.sidedata(storage.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if not sidedata:
        return
    entries = sorted(sidedata.items())
    ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
    for key, value in entries:
        ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
        if ui.verbose:
            ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3724
3724
3725
3725
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only https and ssh URLs make sense here; fill in the scheme's
    # default port when the URL does not carry one.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme not in defaultport:
        raise error.Abort(_(b"only https and ssh connections are supported"))
    try:
        addr = (url.host, int(url.port or defaultport[url.scheme]))
    except ValueError:
        raise error.Abort(_(b"malformed port number in URL"))

    from . import win32

    # NOTE(review): ssl.wrap_socket is deprecated and removed in Python
    # 3.12; migrate to ssl.SSLContext().wrap_socket once older Pythons are
    # no longer supported. Verification is intentionally disabled — we only
    # need the peer's certificate, not a trusted connection.
    sock = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        sock.connect(addr)
        cert = sock.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if complete:
            ui.status(_(b'full certificate chain is available\n'))
        else:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
    finally:
        sock.close()
3797
3797
3798
3798
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle on disk, most recent first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # The incoming machinery below expects these keys to be present.
    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Render the changesets of one bundle, honoring --limit,
        # --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already in the repo.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision we do not have;
            # warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming machinery while probing the bundle contents.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Found and applied the requested changeset; done.
                        break
            else:
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    # Default (non-verbose) listing: one short line per
                    # changeset.
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3939
3939
3940
3940
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, revision) recorded at
    # the given revision, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, substate in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % substate[0])
        ui.writenoi18n(b' revision %s\n' % substate[1])
3952
3952
3953
3953
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose the ui and repo objects to the interactive session.
    local_ns = {
        'ui': ui,
        'repo': repo,
    }
    code.interact(local=local_ns)
3969
3969
3970
3970
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # One output line per successors set; an empty set (pruned
            # changeset) produces a blank line.
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4025
4025
4026
4026
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        # Render the cached .hgtags filenode state for this revision.
        if fnode is None:
            display = b'missing'
        elif fnode:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        else:
            # Present in the cache but falsy and not None: corrupt entry.
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4045
4045
4046
4046
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command itself can
        # run without one (optionalrepo=True).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions as extra template properties.
    # The key "ui" is reserved and rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parse tree, plus the alias-expanded tree when aliases
        # actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the user-supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4110
4110
4111
4111
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # ui.getpass() may return None when no input is available; substitute
    # a placeholder so a "response:" line is always printed.
    response = ui.getpass(prompt)
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4126
4126
4127
4127
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed back on a "response:" line.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4140
4140
4141
4141
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks so cache warming sees a
    # stable repository.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4147
4147
4148
4148
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested extra optimizations before delegating to
    # the upgrade machinery, which expects a set.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4198
4198
4199
4199
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Display paths with forward slashes when ui.slash is set on platforms
    # whose native separator differs.  (Was a lambda assignment shadowing
    # nothing useful; a named function is the idiomatic form.)
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        fmtpath = util.normpath
    else:

        def fmtpath(fn):
            return fn

    # Pad the absolute and relative path columns to their longest entries.
    # Note: the previous code named the loop variable ``abs``, shadowing the
    # builtin, and materialized throwaway lists inside max(); fixed here.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            fmtpath(repo.pathto(fname)),
            m.exact(fname) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4226
4226
4227
4227
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Optional divergent changesets, rendered as "<hex> (<phase>)"
        # pairs followed by a trailing space.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            pairs = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(pairs) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4245
4245
4246
4246
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise argument passing over the wire protocol against REPO.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Connection-related options are not wire arguments; drop them.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Only forward the options the user actually set.
        args = {k: v for k, v in pycompat.iteritems(opts) if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = peer.debugwireargs(*vals, **args)
        res2 = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        peer.close()
4277
4277
4278
4278
4279 def _parsewirelangblocks(fh):
4279 def _parsewirelangblocks(fh):
4280 activeaction = None
4280 activeaction = None
4281 blocklines = []
4281 blocklines = []
4282 lastindent = 0
4282 lastindent = 0
4283
4283
4284 for line in fh:
4284 for line in fh:
4285 line = line.rstrip()
4285 line = line.rstrip()
4286 if not line:
4286 if not line:
4287 continue
4287 continue
4288
4288
4289 if line.startswith(b'#'):
4289 if line.startswith(b'#'):
4290 continue
4290 continue
4291
4291
4292 if not line.startswith(b' '):
4292 if not line.startswith(b' '):
4293 # New block. Flush previous one.
4293 # New block. Flush previous one.
4294 if activeaction:
4294 if activeaction:
4295 yield activeaction, blocklines
4295 yield activeaction, blocklines
4296
4296
4297 activeaction = line
4297 activeaction = line
4298 blocklines = []
4298 blocklines = []
4299 lastindent = 0
4299 lastindent = 0
4300 continue
4300 continue
4301
4301
4302 # Else we start with an indent.
4302 # Else we start with an indent.
4303
4303
4304 if not activeaction:
4304 if not activeaction:
4305 raise error.Abort(_(b'indented line outside of block'))
4305 raise error.Abort(_(b'indented line outside of block'))
4306
4306
4307 indent = len(line) - len(line.lstrip())
4307 indent = len(line) - len(line.lstrip())
4308
4308
4309 # If this line is indented more than the last line, concatenate it.
4309 # If this line is indented more than the last line, concatenate it.
4310 if indent > lastindent and blocklines:
4310 if indent > lastindent and blocklines:
4311 blocklines[-1] += line.lstrip()
4311 blocklines[-1] += line.lstrip()
4312 else:
4312 else:
4313 blocklines.append(line)
4313 blocklines.append(line)
4314 lastindent = indent
4314 lastindent = indent
4315
4315
4316 # Flush last block.
4316 # Flush last block.
4317 if activeaction:
4317 if activeaction:
4318 yield activeaction, blocklines
4318 yield activeaction, blocklines
4319
4319
4320
4320
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = urlutil.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            maybe_silent = (
                ui.silent()
                if opts[b'nologhandshake']
                else util.nullcontextmanager()
            )
            with maybe_silent, ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                peer = httppeer.makepeer(ui, path, opener=opener)

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # BUGFIX: split() returns a list; the previous code passed
                    # the whole list to open() instead of the path component,
                    # so the BODYFILE action always raised TypeError.
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now