##// END OF EJS Templates
debugdeltachain: use the symbolic constant to access entry information...
marmoute -
r50114:0a86cb15 default
parent child Browse files
Show More
@@ -1,4932 +1,4937
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 constants as revlog_constants,
106 deltas as deltautil,
107 deltas as deltautil,
107 nodemap,
108 nodemap,
108 rewrite,
109 rewrite,
109 sidedata,
110 sidedata,
110 )
111 )
111
112
# Alias of lockmod.release.  NOTE(review): unused within this chunk —
# presumably kept at module level for external importers; confirm before
# removing.
release = lockmod.release

# Command table for this module.  It is seeded with the commands provided
# by the strip extension, and the `command` decorator built from it
# registers every debug* command defined below.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
117
118
118
119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it directly, without
        # going through (or requiring) a repository.
        index, rev1, rev2 = args
        opener = vfsmod.vfs(encoding.getcwd(), audit=False)
        rlog = revlog.revlog(opener, index)
        resolve = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
138
139
139
140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Mercurial vfs paths are bytes; the previous code passed a str literal
    # here, which breaks os.path.join() against the bytes vfs base on
    # Python 3.  Use a bytes literal, consistent with b'wb' below.
    eicar_path = b'eicar-test-file.com'
    with repo.cachevfs.open(eicar_path, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(eicar_path))
155
156
156
157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path (local file or URL) and unbundle it into repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
163
164
164
165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to build on top of existing history unless explicitly allowed.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: determine number of revs in DAG, so the
    # progress bar (and the mergeable-file size) can be sized up front.
    total = 0
    for type, data in dagparser.parsedag(text):
        # NOTE(review): `type` and `id` (below) shadow builtins; kept as-is.
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create one commit per 'n' node, under the locks
    # and a single transaction.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []  # nodeid per DAG id, for resolving backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" gets per-rev line edits; on a merge node its two
                    # parent versions are three-way merged against their
                    # common ancestor.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every rev.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one fresh "nf<id>" file per rev; on merges, carry over
                    # the other parent's nf* files so they are not lost.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                # memctx callback returning the content prepared above, or
                # None for files absent from this commit.
                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG parent ids (backrefs) to real node ids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag: remembered and written out in one go below
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
350
351
351
352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the changegroup `gen` to `ui`

    With `all`, every delta of every section (changelog, manifest, then
    each filelog) is printed with its parents, linked cset and delta base;
    otherwise only the changelog node hashes are listed.  Each output line
    is prefixed with `indent` spaces (callers nest this under bundle2
    part listings with indent=4).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        # Print one line per delta of the current section; the section
        # header must have been consumed from `gen` before calling this.
        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} after the last filelog section; iter()
        # with that sentinel stops the loop there.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        # Terse mode: only changelog node ids.  bundle2 files carry parts,
        # not a bare changegroup, so they must go through debugbundle2.
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
391
392
392
393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """decode the obsmarkers bundle part and print its version and markers"""
    opts = pycompat.byteskwargs(opts)
    blob = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(blob)
    except error.UnknownVersion as exc:
        # Unknown encoding version: report it and bail out.
        line = b"%sunsupported version: %s (%d bytes)\n"
        line %= prefix, exc.version, len(blob)
        ui.write(line)
        return
    line = b"%sversion: %d (%d bytes)\n"
    line %= prefix, version, len(blob)
    ui.write(line)
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        mark = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(prefix)
        cmdutil.showmarker(fm, mark)
    fm.end()
415
416
416
417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in binary-encoded phase data

    `data` is decoded with phases.binarydecode(); one line per head is
    written, prefixed with `indent` spaces, as "<hex node> <phase name>".

    (The previous docstring, "display version and markers contained in
    'data'", was copy-pasted from _debugobsmarkers and did not describe
    this function.)
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
425
426
426
427
def _quasirepr(thing):
    """return a repr-like bytes rendering of `thing`

    Mappings are rendered with their keys in sorted order so the output
    is deterministic; everything else falls back to repr().
    """
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, mapping_types):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
433
434
434
435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # Optionally restrict the listing to the requested part types.
        if wanted and part.type not in wanted:
            continue
        header = b'%s -- %s (mandatory: %r)\n'
        ui.write((header % (part.type, _quasirepr(part.params), part.mandatory)))
        # part.type is a single value, so these arms are mutually exclusive.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
457
458
458
459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # --spec: only print the bundlespec, nothing else.
            spec = exchange.getbundlespec(ui, fh)
            ui.write(b'%s\n' % spec)
            return

        bundle = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(bundle, bundle2.unbundle20):
            return _debugbundle2(ui, bundle, all=all, **opts)
        _debugchangegroup(ui, bundle, all=all, **opts)
481
482
482
483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # Always close the peer connection, even if listing fails.
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
502
503
503
504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    # Either recompute the files metadata from the context, or read the
    # stored copy from the changelog's sidedata (the normal path).
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        # No SD_FILES block means no stored files info for this rev;
        # nothing is printed in that case.
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Pick the most specific action; "touched" is the fallback.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # Copy information, if the file was copied from either parent.
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
553
554
554
555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    errcount = 0
    # verify() yields (format, arg, ...) tuples, one per inconsistency.
    for err in repo.dirstate.verify(manifest1, manifest2):
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
568
569
569
570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors.
    show = _debugdisplaystyle if opts.get('style') else _debugdisplaycolor
    return show(ui)
582
583
583
584
def _debugdisplaycolor(ui):
    """write every color/effect label known to the current color mode"""
    # Work on a copy so the caller's ui styles are left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, _value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    # Sort labels containing '_' after the rest so the '_background'
    # entries stay grouped together.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
600
601
601
602
def _debugdisplaystyle(ui):
    """write each configured style label and the effects it expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # Pad with spaces so the effect lists line up in one column.
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
615
616
616
617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # stream bundles dump revlogs wholesale, so phases are not honored
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
638
639
639
640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A standalone revlog index was given: emit its DAG and label the
        # explicitly listed revisions as rN.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, [p for p in rlog.parentrevs(r) if p != -1])
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # rev -> list of tag names attached to that rev
            labels = {}
            for name, node in repo.tags().items():
                labels.setdefault(cl.rev(node), []).append(name)

        def events():
            curbranch = b"default"
            for r in cl:
                if branches:
                    # changelog entry field 5 is the extra dict; emit an
                    # annotation event whenever the branch changes
                    newbranch = cl.read(cl.node(r))[5][b'branch']
                    if newbranch != curbranch:
                        yield b'a', newbranch
                        curbranch = newbranch
                yield b'n', (r, [p for p in cl.parentrevs(r) if p != -1])
                if tags:
                    for name in labels.get(r) or ():
                        yield b'l', (r, name)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
709
710
710
711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # with -c/-m/--dir the single positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
726
727
727
728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # parsed is a (unixtime, tzoffset) pair
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
746
747
747
748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        """Return (compsize, uncompsize, deltatype, chain, chainsize) for rev."""
        e = index[rev]
        # Access index entries through the symbolic ENTRY_* constants
        # instead of bare tuple indices: the entry layout is easy to get
        # wrong and may change as new fields are added.
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, a revision is either a full snapshot
            # (its own base) or a delta against the previous revision
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
930
935
931
936
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 metadata instead of the entries
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        # entries are (filename, state, mode, size, mtime) tuples;
        # sort by (mtime, filename)
        keyfunc = lambda entry: (entry[4], entry[0])
    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for filename, state, mode, size, mtime in entries:
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1019
1024
1020
1025
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Docstring fix: the hash is printed exactly when the dirstate is v2;
    # the "print nothing" case is dirstate-v1 (the old text said v2 twice).
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the ignore-pattern hash is the trailing field of the tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1035
1040
1036
1041
1037 @command(
1042 @command(
1038 b'debugdiscovery',
1043 b'debugdiscovery',
1039 [
1044 [
1040 (b'', b'old', None, _(b'use old-style discovery')),
1045 (b'', b'old', None, _(b'use old-style discovery')),
1041 (
1046 (
1042 b'',
1047 b'',
1043 b'nonheads',
1048 b'nonheads',
1044 None,
1049 None,
1045 _(b'use old-style discovery with non-heads included'),
1050 _(b'use old-style discovery with non-heads included'),
1046 ),
1051 ),
1047 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1052 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1048 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1053 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1049 (
1054 (
1050 b'',
1055 b'',
1051 b'local-as-revs',
1056 b'local-as-revs',
1052 b"",
1057 b"",
1053 b'treat local has having these revisions only',
1058 b'treat local has having these revisions only',
1054 ),
1059 ),
1055 (
1060 (
1056 b'',
1061 b'',
1057 b'remote-as-revs',
1062 b'remote-as-revs',
1058 b"",
1063 b"",
1059 b'use local as remote, with only these revisions',
1064 b'use local as remote, with only these revisions',
1060 ),
1065 ),
1061 ]
1066 ]
1062 + cmdutil.remoteopts
1067 + cmdutil.remoteopts
1063 + cmdutil.formatteropts,
1068 + cmdutil.formatteropts,
1064 _(b'[--rev REV] [OTHER]'),
1069 _(b'[--rev REV] [OTHER]'),
1065 )
1070 )
1066 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1071 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1067 """runs the changeset discovery protocol in isolation
1072 """runs the changeset discovery protocol in isolation
1068
1073
1069 The local peer can be "replaced" by a subset of the local repository by
1074 The local peer can be "replaced" by a subset of the local repository by
1070 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1075 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1071 be "replaced" by a subset of the local repository using the
1076 be "replaced" by a subset of the local repository using the
1072 `--local-as-revs` flag. This is useful to efficiently debug pathological
1077 `--local-as-revs` flag. This is useful to efficiently debug pathological
1073 discovery situation.
1078 discovery situation.
1074
1079
1075 The following developer oriented config are relevant for people playing with this command:
1080 The following developer oriented config are relevant for people playing with this command:
1076
1081
1077 * devel.discovery.exchange-heads=True
1082 * devel.discovery.exchange-heads=True
1078
1083
1079 If False, the discovery will not start with
1084 If False, the discovery will not start with
1080 remote head fetching and local head querying.
1085 remote head fetching and local head querying.
1081
1086
1082 * devel.discovery.grow-sample=True
1087 * devel.discovery.grow-sample=True
1083
1088
1084 If False, the sample size used in set discovery will not be increased
1089 If False, the sample size used in set discovery will not be increased
1085 through the process
1090 through the process
1086
1091
1087 * devel.discovery.grow-sample.dynamic=True
1092 * devel.discovery.grow-sample.dynamic=True
1088
1093
1089 When discovery.grow-sample.dynamic is True, the default, the sample size is
1094 When discovery.grow-sample.dynamic is True, the default, the sample size is
1090 adapted to the shape of the undecided set (it is set to the max of:
1095 adapted to the shape of the undecided set (it is set to the max of:
1091 <target-size>, len(roots(undecided)), len(heads(undecided)
1096 <target-size>, len(roots(undecided)), len(heads(undecided)
1092
1097
1093 * devel.discovery.grow-sample.rate=1.05
1098 * devel.discovery.grow-sample.rate=1.05
1094
1099
1095 the rate at which the sample grow
1100 the rate at which the sample grow
1096
1101
1097 * devel.discovery.randomize=True
1102 * devel.discovery.randomize=True
1098
1103
1099 If andom sampling during discovery are deterministic. It is meant for
1104 If andom sampling during discovery are deterministic. It is meant for
1100 integration tests.
1105 integration tests.
1101
1106
1102 * devel.discovery.sample-size=200
1107 * devel.discovery.sample-size=200
1103
1108
1104 Control the initial size of the discovery sample
1109 Control the initial size of the discovery sample
1105
1110
1106 * devel.discovery.sample-size.initial=100
1111 * devel.discovery.sample-size.initial=100
1107
1112
1108 Control the initial size of the discovery for initial change
1113 Control the initial size of the discovery for initial change
1109 """
1114 """
1110 opts = pycompat.byteskwargs(opts)
1115 opts = pycompat.byteskwargs(opts)
1111 unfi = repo.unfiltered()
1116 unfi = repo.unfiltered()
1112
1117
1113 # setup potential extra filtering
1118 # setup potential extra filtering
1114 local_revs = opts[b"local_as_revs"]
1119 local_revs = opts[b"local_as_revs"]
1115 remote_revs = opts[b"remote_as_revs"]
1120 remote_revs = opts[b"remote_as_revs"]
1116
1121
1117 # make sure tests are repeatable
1122 # make sure tests are repeatable
1118 random.seed(int(opts[b'seed']))
1123 random.seed(int(opts[b'seed']))
1119
1124
1120 if not remote_revs:
1125 if not remote_revs:
1121
1126
1122 remoteurl, branches = urlutil.get_unique_pull_path(
1127 remoteurl, branches = urlutil.get_unique_pull_path(
1123 b'debugdiscovery', repo, ui, remoteurl
1128 b'debugdiscovery', repo, ui, remoteurl
1124 )
1129 )
1125 remote = hg.peer(repo, opts, remoteurl)
1130 remote = hg.peer(repo, opts, remoteurl)
1126 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1131 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1127 else:
1132 else:
1128 branches = (None, [])
1133 branches = (None, [])
1129 remote_filtered_revs = logcmdutil.revrange(
1134 remote_filtered_revs = logcmdutil.revrange(
1130 unfi, [b"not (::(%s))" % remote_revs]
1135 unfi, [b"not (::(%s))" % remote_revs]
1131 )
1136 )
1132 remote_filtered_revs = frozenset(remote_filtered_revs)
1137 remote_filtered_revs = frozenset(remote_filtered_revs)
1133
1138
1134 def remote_func(x):
1139 def remote_func(x):
1135 return remote_filtered_revs
1140 return remote_filtered_revs
1136
1141
1137 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1142 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1138
1143
1139 remote = repo.peer()
1144 remote = repo.peer()
1140 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1145 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1141
1146
1142 if local_revs:
1147 if local_revs:
1143 local_filtered_revs = logcmdutil.revrange(
1148 local_filtered_revs = logcmdutil.revrange(
1144 unfi, [b"not (::(%s))" % local_revs]
1149 unfi, [b"not (::(%s))" % local_revs]
1145 )
1150 )
1146 local_filtered_revs = frozenset(local_filtered_revs)
1151 local_filtered_revs = frozenset(local_filtered_revs)
1147
1152
1148 def local_func(x):
1153 def local_func(x):
1149 return local_filtered_revs
1154 return local_filtered_revs
1150
1155
1151 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1156 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1152 repo = repo.filtered(b'debug-discovery-local-filter')
1157 repo = repo.filtered(b'debug-discovery-local-filter')
1153
1158
1154 data = {}
1159 data = {}
1155 if opts.get(b'old'):
1160 if opts.get(b'old'):
1156
1161
1157 def doit(pushedrevs, remoteheads, remote=remote):
1162 def doit(pushedrevs, remoteheads, remote=remote):
1158 if not util.safehasattr(remote, b'branches'):
1163 if not util.safehasattr(remote, b'branches'):
1159 # enable in-client legacy support
1164 # enable in-client legacy support
1160 remote = localrepo.locallegacypeer(remote.local())
1165 remote = localrepo.locallegacypeer(remote.local())
1161 common, _in, hds = treediscovery.findcommonincoming(
1166 common, _in, hds = treediscovery.findcommonincoming(
1162 repo, remote, force=True, audit=data
1167 repo, remote, force=True, audit=data
1163 )
1168 )
1164 common = set(common)
1169 common = set(common)
1165 if not opts.get(b'nonheads'):
1170 if not opts.get(b'nonheads'):
1166 ui.writenoi18n(
1171 ui.writenoi18n(
1167 b"unpruned common: %s\n"
1172 b"unpruned common: %s\n"
1168 % b" ".join(sorted(short(n) for n in common))
1173 % b" ".join(sorted(short(n) for n in common))
1169 )
1174 )
1170
1175
1171 clnode = repo.changelog.node
1176 clnode = repo.changelog.node
1172 common = repo.revs(b'heads(::%ln)', common)
1177 common = repo.revs(b'heads(::%ln)', common)
1173 common = {clnode(r) for r in common}
1178 common = {clnode(r) for r in common}
1174 return common, hds
1179 return common, hds
1175
1180
1176 else:
1181 else:
1177
1182
1178 def doit(pushedrevs, remoteheads, remote=remote):
1183 def doit(pushedrevs, remoteheads, remote=remote):
1179 nodes = None
1184 nodes = None
1180 if pushedrevs:
1185 if pushedrevs:
1181 revs = logcmdutil.revrange(repo, pushedrevs)
1186 revs = logcmdutil.revrange(repo, pushedrevs)
1182 nodes = [repo[r].node() for r in revs]
1187 nodes = [repo[r].node() for r in revs]
1183 common, any, hds = setdiscovery.findcommonheads(
1188 common, any, hds = setdiscovery.findcommonheads(
1184 ui, repo, remote, ancestorsof=nodes, audit=data
1189 ui, repo, remote, ancestorsof=nodes, audit=data
1185 )
1190 )
1186 return common, hds
1191 return common, hds
1187
1192
1188 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1193 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1189 localrevs = opts[b'rev']
1194 localrevs = opts[b'rev']
1190
1195
1191 fm = ui.formatter(b'debugdiscovery', opts)
1196 fm = ui.formatter(b'debugdiscovery', opts)
1192 if fm.strict_format:
1197 if fm.strict_format:
1193
1198
1194 @contextlib.contextmanager
1199 @contextlib.contextmanager
1195 def may_capture_output():
1200 def may_capture_output():
1196 ui.pushbuffer()
1201 ui.pushbuffer()
1197 yield
1202 yield
1198 data[b'output'] = ui.popbuffer()
1203 data[b'output'] = ui.popbuffer()
1199
1204
1200 else:
1205 else:
1201 may_capture_output = util.nullcontextmanager
1206 may_capture_output = util.nullcontextmanager
1202 with may_capture_output():
1207 with may_capture_output():
1203 with util.timedcm('debug-discovery') as t:
1208 with util.timedcm('debug-discovery') as t:
1204 common, hds = doit(localrevs, remoterevs)
1209 common, hds = doit(localrevs, remoterevs)
1205
1210
1206 # compute all statistics
1211 # compute all statistics
1207 heads_common = set(common)
1212 heads_common = set(common)
1208 heads_remote = set(hds)
1213 heads_remote = set(hds)
1209 heads_local = set(repo.heads())
1214 heads_local = set(repo.heads())
1210 # note: they cannot be a local or remote head that is in common and not
1215 # note: they cannot be a local or remote head that is in common and not
1211 # itself a head of common.
1216 # itself a head of common.
1212 heads_common_local = heads_common & heads_local
1217 heads_common_local = heads_common & heads_local
1213 heads_common_remote = heads_common & heads_remote
1218 heads_common_remote = heads_common & heads_remote
1214 heads_common_both = heads_common & heads_remote & heads_local
1219 heads_common_both = heads_common & heads_remote & heads_local
1215
1220
1216 all = repo.revs(b'all()')
1221 all = repo.revs(b'all()')
1217 common = repo.revs(b'::%ln', common)
1222 common = repo.revs(b'::%ln', common)
1218 roots_common = repo.revs(b'roots(::%ld)', common)
1223 roots_common = repo.revs(b'roots(::%ld)', common)
1219 missing = repo.revs(b'not ::%ld', common)
1224 missing = repo.revs(b'not ::%ld', common)
1220 heads_missing = repo.revs(b'heads(%ld)', missing)
1225 heads_missing = repo.revs(b'heads(%ld)', missing)
1221 roots_missing = repo.revs(b'roots(%ld)', missing)
1226 roots_missing = repo.revs(b'roots(%ld)', missing)
1222 assert len(common) + len(missing) == len(all)
1227 assert len(common) + len(missing) == len(all)
1223
1228
1224 initial_undecided = repo.revs(
1229 initial_undecided = repo.revs(
1225 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1230 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1226 )
1231 )
1227 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1232 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1228 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1233 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1229 common_initial_undecided = initial_undecided & common
1234 common_initial_undecided = initial_undecided & common
1230 missing_initial_undecided = initial_undecided & missing
1235 missing_initial_undecided = initial_undecided & missing
1231
1236
1232 data[b'elapsed'] = t.elapsed
1237 data[b'elapsed'] = t.elapsed
1233 data[b'nb-common-heads'] = len(heads_common)
1238 data[b'nb-common-heads'] = len(heads_common)
1234 data[b'nb-common-heads-local'] = len(heads_common_local)
1239 data[b'nb-common-heads-local'] = len(heads_common_local)
1235 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1240 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1236 data[b'nb-common-heads-both'] = len(heads_common_both)
1241 data[b'nb-common-heads-both'] = len(heads_common_both)
1237 data[b'nb-common-roots'] = len(roots_common)
1242 data[b'nb-common-roots'] = len(roots_common)
1238 data[b'nb-head-local'] = len(heads_local)
1243 data[b'nb-head-local'] = len(heads_local)
1239 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1244 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1240 data[b'nb-head-remote'] = len(heads_remote)
1245 data[b'nb-head-remote'] = len(heads_remote)
1241 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1246 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1242 heads_common_remote
1247 heads_common_remote
1243 )
1248 )
1244 data[b'nb-revs'] = len(all)
1249 data[b'nb-revs'] = len(all)
1245 data[b'nb-revs-common'] = len(common)
1250 data[b'nb-revs-common'] = len(common)
1246 data[b'nb-revs-missing'] = len(missing)
1251 data[b'nb-revs-missing'] = len(missing)
1247 data[b'nb-missing-heads'] = len(heads_missing)
1252 data[b'nb-missing-heads'] = len(heads_missing)
1248 data[b'nb-missing-roots'] = len(roots_missing)
1253 data[b'nb-missing-roots'] = len(roots_missing)
1249 data[b'nb-ini_und'] = len(initial_undecided)
1254 data[b'nb-ini_und'] = len(initial_undecided)
1250 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1255 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1251 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1256 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1252 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1257 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1253 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1258 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1254
1259
1255 fm.startitem()
1260 fm.startitem()
1256 fm.data(**pycompat.strkwargs(data))
1261 fm.data(**pycompat.strkwargs(data))
1257 # display discovery summary
1262 # display discovery summary
1258 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1263 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1259 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1264 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1260 fm.plain(b"queries: %(total-queries)9d\n" % data)
1265 fm.plain(b"queries: %(total-queries)9d\n" % data)
1261 fm.plain(b"heads summary:\n")
1266 fm.plain(b"heads summary:\n")
1262 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1267 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1263 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1268 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1264 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1269 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1265 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1270 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1266 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1271 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1267 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1272 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1268 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1273 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1269 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1274 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1270 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1275 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1271 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1276 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1272 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1277 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1273 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1278 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1274 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1279 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1275 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1280 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1276 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1281 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1277 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1282 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1278 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1283 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1279 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1284 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1280 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1285 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1281 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1286 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1282 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1287 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1283 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1288 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1284
1289
1285 if ui.verbose:
1290 if ui.verbose:
1286 fm.plain(
1291 fm.plain(
1287 b"common heads: %s\n"
1292 b"common heads: %s\n"
1288 % b" ".join(sorted(short(n) for n in heads_common))
1293 % b" ".join(sorted(short(n) for n in heads_common))
1289 )
1294 )
1290 fm.end()
1295 fm.end()
1291
1296
1292
1297
# buffer size (4 KiB) used when streaming data in debugdownload below
_chunksize = 4 << 10
1294
1299
1295
1300
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # open the resource through Mercurial's url handling (proxy, auth, ...)
    fh = urlmod.open(ui, url, output)

    # default destination is the ui itself; --output redirects to a file
    # opened with a buffer matching our copy chunk size
    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # stream in _chunksize pieces to bound memory usage on large
        # downloads
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the remote handle too (the original leaked `fh`,
        # noticeably so when an error interrupted the copy loop)
        fh.close()
        if output:
            dest.close()
1319
1324
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # emit one formatter item per extension, sorted by extension name
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate where the extension was loaded from; frozen (oxidized)
        # builds have no __file__, so fall back to the executable path
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # in quiet/verbose mode the name stands alone; otherwise append a
        # marker when the extension is untested or tested only on an older
        # Mercurial version
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        # the remaining fields only render with --verbose (condwrite)
        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1381
1386
1382
1387
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the parse tree goes through these transformation stages in order;
    # each stage name can be requested with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # decide which stages to print: --show-stage all, an explicit list
    # (validated against stagenames), or just 'parsed' under --verbose
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather candidate file names: every file touched by any revision
    # (--all-files), the working directory walk when relevant, or just
    # the files of the selected revision
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    # build the matcher from the fileset expression and print every
    # candidate file it accepts, in sorted order
    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1478
1483
1479
1484
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report and applying one (or doing a dry run) are
    # mutually exclusive modes of operation
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # forward the relevant CLI options to the repair implementation
    repair_kwargs = {
        name: opts.get(name)
        for name in ('dry_run', 'to_report', 'from_report', 'paranoid')
    }
    rewrite.repair_issue6528(ui, repo, **repair_kwargs)
1552
1557
1553
1558
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, but never narrower than the
    # b'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name column so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes pass through unchanged; other values are rendered as
            # yes/no based on their truthiness (plain output only)
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json, template, ...) keep raw values
        formatvalue = pycompat.identity

    # header row; config/default columns only appear with --verbose
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so output can highlight a repo value that disagrees
        # with the config, or with the Mercurial default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1624
1629
1625
1630
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result as the historical yes/no bytes
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # case sensitivity needs a scratch file in the target directory; if
    # that fails (e.g. unwritable path) we report '(unknown)'
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1648
1653
1649
1654
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # TODO: get desired bundlecaps from command line.
    args = {'bundlecaps': None}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing --type value onto an on-disk bundle format name
    known_types = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = known_types.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1696
1701
1697
1702
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    # callable predicate: ignore(path) -> truthy when the path is ignored
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the path itself matches an ignore rule
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # not directly ignored: check whether any ancestor
                    # directory is, stopping at the first (deepest) match
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1746
1751
1747
1752
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full-length hashes with --debug, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # size the id columns from the first revision, if the store is non-empty
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1787
1792
1788
1793
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        # one edge per parent; a null second parent is not drawn
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1807
1812
1808
1813
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # result discarded: presumably called for its side effect of exercising
    # the native index before stats are read -- TODO confirm
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for name, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (name, value))
1818
1823
1819
1824
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Probes encoding, the Python runtime, compiled extension modules,
    compression engines, templates, the commit editor and the configured
    username, writing one report line per check through the formatter.

    Returns 0 on success.
    (The actual return value is the number of problems detected.)
    """
    opts = pycompat.byteskwargs(opts)

    # running tally of hard failures; warnings do not bump this
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    # condwrite: only emitted when err is set
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # PyOxidizer builds embed the stdlib in the executable itself
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # NOTE: rustext is deliberately rebound here; from this point on it is a
    # boolean derived from the module policy, not the module imported above
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is cleared so the problem is counted below
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may include arguments; check only the binary
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # the 'vi' default missing is reported but (below) not counted as a
    # problem; an explicitly configured-but-missing editor is
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # give extensions a chance to run their own installation checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2118
2123
2119
2124
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    # one '1'/'0' character per queried id, in input order
    answer = b"".join(b"1" if known else b"0" for known in peer.known(nodes))
    ui.write(b"%s\n" % answer)
2133
2138
2134
2139
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept for old completion scripts; delegates unchanged
    debugnamecomplete(ui, repo, *args)
2140
2145
2141 @command(
2146 @command(
2142 b'debuglocks',
2147 b'debuglocks',
2143 [
2148 [
2144 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2149 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2145 (
2150 (
2146 b'W',
2151 b'W',
2147 b'force-free-wlock',
2152 b'force-free-wlock',
2148 None,
2153 None,
2149 _(b'free the working state lock (DANGEROUS)'),
2154 _(b'free the working state lock (DANGEROUS)'),
2150 ),
2155 ),
2151 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2156 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2152 (
2157 (
2153 b'S',
2158 b'S',
2154 b'set-wlock',
2159 b'set-wlock',
2155 None,
2160 None,
2156 _(b'set the working state lock until stopped'),
2161 _(b'set the working state lock until stopped'),
2157 ),
2162 ),
2158 ],
2163 ],
2159 _(b'[OPTION]...'),
2164 _(b'[OPTION]...'),
2160 )
2165 )
2161 def debuglocks(ui, repo, **opts):
2166 def debuglocks(ui, repo, **opts):
2162 """show or modify state of locks
2167 """show or modify state of locks
2163
2168
2164 By default, this command will show which locks are held. This
2169 By default, this command will show which locks are held. This
2165 includes the user and process holding the lock, the amount of time
2170 includes the user and process holding the lock, the amount of time
2166 the lock has been held, and the machine name where the process is
2171 the lock has been held, and the machine name where the process is
2167 running if it's not local.
2172 running if it's not local.
2168
2173
2169 Locks protect the integrity of Mercurial's data, so should be
2174 Locks protect the integrity of Mercurial's data, so should be
2170 treated with care. System crashes or other interruptions may cause
2175 treated with care. System crashes or other interruptions may cause
2171 locks to not be properly released, though Mercurial will usually
2176 locks to not be properly released, though Mercurial will usually
2172 detect and remove such stale locks automatically.
2177 detect and remove such stale locks automatically.
2173
2178
2174 However, detecting stale locks may not always be possible (for
2179 However, detecting stale locks may not always be possible (for
2175 instance, on a shared filesystem). Removing locks may also be
2180 instance, on a shared filesystem). Removing locks may also be
2176 blocked by filesystem permissions.
2181 blocked by filesystem permissions.
2177
2182
2178 Setting a lock will prevent other commands from changing the data.
2183 Setting a lock will prevent other commands from changing the data.
2179 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2184 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2180 The set locks are removed when the command exits.
2185 The set locks are removed when the command exits.
2181
2186
2182 Returns 0 if no locks are held.
2187 Returns 0 if no locks are held.
2183
2188
2184 """
2189 """
2185
2190
2186 if opts.get('force_free_lock'):
2191 if opts.get('force_free_lock'):
2187 repo.svfs.tryunlink(b'lock')
2192 repo.svfs.tryunlink(b'lock')
2188 if opts.get('force_free_wlock'):
2193 if opts.get('force_free_wlock'):
2189 repo.vfs.tryunlink(b'wlock')
2194 repo.vfs.tryunlink(b'wlock')
2190 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2195 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2191 return 0
2196 return 0
2192
2197
2193 locks = []
2198 locks = []
2194 try:
2199 try:
2195 if opts.get('set_wlock'):
2200 if opts.get('set_wlock'):
2196 try:
2201 try:
2197 locks.append(repo.wlock(False))
2202 locks.append(repo.wlock(False))
2198 except error.LockHeld:
2203 except error.LockHeld:
2199 raise error.Abort(_(b'wlock is already held'))
2204 raise error.Abort(_(b'wlock is already held'))
2200 if opts.get('set_lock'):
2205 if opts.get('set_lock'):
2201 try:
2206 try:
2202 locks.append(repo.lock(False))
2207 locks.append(repo.lock(False))
2203 except error.LockHeld:
2208 except error.LockHeld:
2204 raise error.Abort(_(b'lock is already held'))
2209 raise error.Abort(_(b'lock is already held'))
2205 if len(locks):
2210 if len(locks):
2206 try:
2211 try:
2207 if ui.interactive():
2212 if ui.interactive():
2208 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2213 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2209 ui.promptchoice(prompt)
2214 ui.promptchoice(prompt)
2210 else:
2215 else:
2211 msg = b"%d locks held, waiting for signal\n"
2216 msg = b"%d locks held, waiting for signal\n"
2212 msg %= len(locks)
2217 msg %= len(locks)
2213 ui.status(msg)
2218 ui.status(msg)
2214 while True: # XXX wait for a signal
2219 while True: # XXX wait for a signal
2215 time.sleep(0.1)
2220 time.sleep(0.1)
2216 except KeyboardInterrupt:
2221 except KeyboardInterrupt:
2217 msg = b"signal-received releasing locks\n"
2222 msg = b"signal-received releasing locks\n"
2218 ui.status(msg)
2223 ui.status(msg)
2219 return 0
2224 return 0
2220 finally:
2225 finally:
2221 release(*locks)
2226 release(*locks)
2222
2227
2223 now = time.time()
2228 now = time.time()
2224 held = 0
2229 held = 0
2225
2230
2226 def report(vfs, name, method):
2231 def report(vfs, name, method):
2227 # this causes stale locks to get reaped for more accurate reporting
2232 # this causes stale locks to get reaped for more accurate reporting
2228 try:
2233 try:
2229 l = method(False)
2234 l = method(False)
2230 except error.LockHeld:
2235 except error.LockHeld:
2231 l = None
2236 l = None
2232
2237
2233 if l:
2238 if l:
2234 l.release()
2239 l.release()
2235 else:
2240 else:
2236 try:
2241 try:
2237 st = vfs.lstat(name)
2242 st = vfs.lstat(name)
2238 age = now - st[stat.ST_MTIME]
2243 age = now - st[stat.ST_MTIME]
2239 user = util.username(st.st_uid)
2244 user = util.username(st.st_uid)
2240 locker = vfs.readlock(name)
2245 locker = vfs.readlock(name)
2241 if b":" in locker:
2246 if b":" in locker:
2242 host, pid = locker.split(b':')
2247 host, pid = locker.split(b':')
2243 if host == socket.gethostname():
2248 if host == socket.gethostname():
2244 locker = b'user %s, process %s' % (user or b'None', pid)
2249 locker = b'user %s, process %s' % (user or b'None', pid)
2245 else:
2250 else:
2246 locker = b'user %s, process %s, host %s' % (
2251 locker = b'user %s, process %s, host %s' % (
2247 user or b'None',
2252 user or b'None',
2248 pid,
2253 pid,
2249 host,
2254 host,
2250 )
2255 )
2251 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2256 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2252 return 1
2257 return 1
2253 except OSError as e:
2258 except OSError as e:
2254 if e.errno != errno.ENOENT:
2259 if e.errno != errno.ENOENT:
2255 raise
2260 raise
2256
2261
2257 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2262 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2258 return 0
2263 return 0
2259
2264
2260 held += report(repo.svfs, b"lock", repo.lock)
2265 held += report(repo.svfs, b"lock", repo.lock)
2261 held += report(repo.vfs, b"wlock", repo.wlock)
2266 held += report(repo.vfs, b"wlock", repo.wlock)
2262
2267
2263 return held
2268 return held
2264
2269
2265
2270
2266 @command(
2271 @command(
2267 b'debugmanifestfulltextcache',
2272 b'debugmanifestfulltextcache',
2268 [
2273 [
2269 (b'', b'clear', False, _(b'clear the cache')),
2274 (b'', b'clear', False, _(b'clear the cache')),
2270 (
2275 (
2271 b'a',
2276 b'a',
2272 b'add',
2277 b'add',
2273 [],
2278 [],
2274 _(b'add the given manifest nodes to the cache'),
2279 _(b'add the given manifest nodes to the cache'),
2275 _(b'NODE'),
2280 _(b'NODE'),
2276 ),
2281 ),
2277 ],
2282 ],
2278 b'',
2283 b'',
2279 )
2284 )
2280 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2285 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2281 """show, clear or amend the contents of the manifest fulltext cache"""
2286 """show, clear or amend the contents of the manifest fulltext cache"""
2282
2287
2283 def getcache():
2288 def getcache():
2284 r = repo.manifestlog.getstorage(b'')
2289 r = repo.manifestlog.getstorage(b'')
2285 try:
2290 try:
2286 return r._fulltextcache
2291 return r._fulltextcache
2287 except AttributeError:
2292 except AttributeError:
2288 msg = _(
2293 msg = _(
2289 b"Current revlog implementation doesn't appear to have a "
2294 b"Current revlog implementation doesn't appear to have a "
2290 b"manifest fulltext cache\n"
2295 b"manifest fulltext cache\n"
2291 )
2296 )
2292 raise error.Abort(msg)
2297 raise error.Abort(msg)
2293
2298
2294 if opts.get('clear'):
2299 if opts.get('clear'):
2295 with repo.wlock():
2300 with repo.wlock():
2296 cache = getcache()
2301 cache = getcache()
2297 cache.clear(clear_persisted_data=True)
2302 cache.clear(clear_persisted_data=True)
2298 return
2303 return
2299
2304
2300 if add:
2305 if add:
2301 with repo.wlock():
2306 with repo.wlock():
2302 m = repo.manifestlog
2307 m = repo.manifestlog
2303 store = m.getstorage(b'')
2308 store = m.getstorage(b'')
2304 for n in add:
2309 for n in add:
2305 try:
2310 try:
2306 manifest = m[store.lookup(n)]
2311 manifest = m[store.lookup(n)]
2307 except error.LookupError as e:
2312 except error.LookupError as e:
2308 raise error.Abort(
2313 raise error.Abort(
2309 bytes(e), hint=b"Check your manifest node id"
2314 bytes(e), hint=b"Check your manifest node id"
2310 )
2315 )
2311 manifest.read() # stores revisision in cache too
2316 manifest.read() # stores revisision in cache too
2312 return
2317 return
2313
2318
2314 cache = getcache()
2319 cache = getcache()
2315 if not len(cache):
2320 if not len(cache):
2316 ui.write(_(b'cache empty\n'))
2321 ui.write(_(b'cache empty\n'))
2317 else:
2322 else:
2318 ui.write(
2323 ui.write(
2319 _(
2324 _(
2320 b'cache contains %d manifest entries, in order of most to '
2325 b'cache contains %d manifest entries, in order of most to '
2321 b'least recent:\n'
2326 b'least recent:\n'
2322 )
2327 )
2323 % (len(cache),)
2328 % (len(cache),)
2324 )
2329 )
2325 totalsize = 0
2330 totalsize = 0
2326 for nodeid in cache:
2331 for nodeid in cache:
2327 # Use cache.get to not update the LRU order
2332 # Use cache.get to not update the LRU order
2328 data = cache.peek(nodeid)
2333 data = cache.peek(nodeid)
2329 size = len(data)
2334 size = len(data)
2330 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2335 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2331 ui.write(
2336 ui.write(
2332 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2337 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2333 )
2338 )
2334 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2339 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2335 ui.write(
2340 ui.write(
2336 _(b'total cache data size %s, on-disk %s\n')
2341 _(b'total cache data size %s, on-disk %s\n')
2337 % (util.bytecount(totalsize), util.bytecount(ondisk))
2342 % (util.bytecount(totalsize), util.bytecount(ondisk))
2338 )
2343 )
2339
2344
2340
2345
2341 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2346 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2342 def debugmergestate(ui, repo, *args, **opts):
2347 def debugmergestate(ui, repo, *args, **opts):
2343 """print merge state
2348 """print merge state
2344
2349
2345 Use --verbose to print out information about whether v1 or v2 merge state
2350 Use --verbose to print out information about whether v1 or v2 merge state
2346 was chosen."""
2351 was chosen."""
2347
2352
2348 if ui.verbose:
2353 if ui.verbose:
2349 ms = mergestatemod.mergestate(repo)
2354 ms = mergestatemod.mergestate(repo)
2350
2355
2351 # sort so that reasonable information is on top
2356 # sort so that reasonable information is on top
2352 v1records = ms._readrecordsv1()
2357 v1records = ms._readrecordsv1()
2353 v2records = ms._readrecordsv2()
2358 v2records = ms._readrecordsv2()
2354
2359
2355 if not v1records and not v2records:
2360 if not v1records and not v2records:
2356 pass
2361 pass
2357 elif not v2records:
2362 elif not v2records:
2358 ui.writenoi18n(b'no version 2 merge state\n')
2363 ui.writenoi18n(b'no version 2 merge state\n')
2359 elif ms._v1v2match(v1records, v2records):
2364 elif ms._v1v2match(v1records, v2records):
2360 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2365 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2361 else:
2366 else:
2362 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2367 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2363
2368
2364 opts = pycompat.byteskwargs(opts)
2369 opts = pycompat.byteskwargs(opts)
2365 if not opts[b'template']:
2370 if not opts[b'template']:
2366 opts[b'template'] = (
2371 opts[b'template'] = (
2367 b'{if(commits, "", "no merge state found\n")}'
2372 b'{if(commits, "", "no merge state found\n")}'
2368 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2373 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2369 b'{files % "file: {path} (state \\"{state}\\")\n'
2374 b'{files % "file: {path} (state \\"{state}\\")\n'
2370 b'{if(local_path, "'
2375 b'{if(local_path, "'
2371 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2376 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2372 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2377 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2373 b' other path: {other_path} (node {other_node})\n'
2378 b' other path: {other_path} (node {other_node})\n'
2374 b'")}'
2379 b'")}'
2375 b'{if(rename_side, "'
2380 b'{if(rename_side, "'
2376 b' rename side: {rename_side}\n'
2381 b' rename side: {rename_side}\n'
2377 b' renamed path: {renamed_path}\n'
2382 b' renamed path: {renamed_path}\n'
2378 b'")}'
2383 b'")}'
2379 b'{extras % " extra: {key} = {value}\n"}'
2384 b'{extras % " extra: {key} = {value}\n"}'
2380 b'"}'
2385 b'"}'
2381 b'{extras % "extra: {file} ({key} = {value})\n"}'
2386 b'{extras % "extra: {file} ({key} = {value})\n"}'
2382 )
2387 )
2383
2388
2384 ms = mergestatemod.mergestate.read(repo)
2389 ms = mergestatemod.mergestate.read(repo)
2385
2390
2386 fm = ui.formatter(b'debugmergestate', opts)
2391 fm = ui.formatter(b'debugmergestate', opts)
2387 fm.startitem()
2392 fm.startitem()
2388
2393
2389 fm_commits = fm.nested(b'commits')
2394 fm_commits = fm.nested(b'commits')
2390 if ms.active():
2395 if ms.active():
2391 for name, node, label_index in (
2396 for name, node, label_index in (
2392 (b'local', ms.local, 0),
2397 (b'local', ms.local, 0),
2393 (b'other', ms.other, 1),
2398 (b'other', ms.other, 1),
2394 ):
2399 ):
2395 fm_commits.startitem()
2400 fm_commits.startitem()
2396 fm_commits.data(name=name)
2401 fm_commits.data(name=name)
2397 fm_commits.data(node=hex(node))
2402 fm_commits.data(node=hex(node))
2398 if ms._labels and len(ms._labels) > label_index:
2403 if ms._labels and len(ms._labels) > label_index:
2399 fm_commits.data(label=ms._labels[label_index])
2404 fm_commits.data(label=ms._labels[label_index])
2400 fm_commits.end()
2405 fm_commits.end()
2401
2406
2402 fm_files = fm.nested(b'files')
2407 fm_files = fm.nested(b'files')
2403 if ms.active():
2408 if ms.active():
2404 for f in ms:
2409 for f in ms:
2405 fm_files.startitem()
2410 fm_files.startitem()
2406 fm_files.data(path=f)
2411 fm_files.data(path=f)
2407 state = ms._state[f]
2412 state = ms._state[f]
2408 fm_files.data(state=state[0])
2413 fm_files.data(state=state[0])
2409 if state[0] in (
2414 if state[0] in (
2410 mergestatemod.MERGE_RECORD_UNRESOLVED,
2415 mergestatemod.MERGE_RECORD_UNRESOLVED,
2411 mergestatemod.MERGE_RECORD_RESOLVED,
2416 mergestatemod.MERGE_RECORD_RESOLVED,
2412 ):
2417 ):
2413 fm_files.data(local_key=state[1])
2418 fm_files.data(local_key=state[1])
2414 fm_files.data(local_path=state[2])
2419 fm_files.data(local_path=state[2])
2415 fm_files.data(ancestor_path=state[3])
2420 fm_files.data(ancestor_path=state[3])
2416 fm_files.data(ancestor_node=state[4])
2421 fm_files.data(ancestor_node=state[4])
2417 fm_files.data(other_path=state[5])
2422 fm_files.data(other_path=state[5])
2418 fm_files.data(other_node=state[6])
2423 fm_files.data(other_node=state[6])
2419 fm_files.data(local_flags=state[7])
2424 fm_files.data(local_flags=state[7])
2420 elif state[0] in (
2425 elif state[0] in (
2421 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2426 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2422 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2427 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2423 ):
2428 ):
2424 fm_files.data(renamed_path=state[1])
2429 fm_files.data(renamed_path=state[1])
2425 fm_files.data(rename_side=state[2])
2430 fm_files.data(rename_side=state[2])
2426 fm_extras = fm_files.nested(b'extras')
2431 fm_extras = fm_files.nested(b'extras')
2427 for k, v in sorted(ms.extras(f).items()):
2432 for k, v in sorted(ms.extras(f).items()):
2428 fm_extras.startitem()
2433 fm_extras.startitem()
2429 fm_extras.data(key=k)
2434 fm_extras.data(key=k)
2430 fm_extras.data(value=v)
2435 fm_extras.data(value=v)
2431 fm_extras.end()
2436 fm_extras.end()
2432
2437
2433 fm_files.end()
2438 fm_files.end()
2434
2439
2435 fm_extras = fm.nested(b'extras')
2440 fm_extras = fm.nested(b'extras')
2436 for f, d in sorted(ms.allextras().items()):
2441 for f, d in sorted(ms.allextras().items()):
2437 if f in ms:
2442 if f in ms:
2438 # If file is in mergestate, we have already processed it's extras
2443 # If file is in mergestate, we have already processed it's extras
2439 continue
2444 continue
2440 for k, v in d.items():
2445 for k, v in d.items():
2441 fm_extras.startitem()
2446 fm_extras.startitem()
2442 fm_extras.data(file=f)
2447 fm_extras.data(file=f)
2443 fm_extras.data(key=k)
2448 fm_extras.data(key=k)
2444 fm_extras.data(value=v)
2449 fm_extras.data(value=v)
2445 fm_extras.end()
2450 fm_extras.end()
2446
2451
2447 fm.end()
2452 fm.end()
2448
2453
2449
2454
2450 @command(b'debugnamecomplete', [], _(b'NAME...'))
2455 @command(b'debugnamecomplete', [], _(b'NAME...'))
2451 def debugnamecomplete(ui, repo, *args):
2456 def debugnamecomplete(ui, repo, *args):
2452 '''complete "names" - tags, open branch names, bookmark names'''
2457 '''complete "names" - tags, open branch names, bookmark names'''
2453
2458
2454 names = set()
2459 names = set()
2455 # since we previously only listed open branches, we will handle that
2460 # since we previously only listed open branches, we will handle that
2456 # specially (after this for loop)
2461 # specially (after this for loop)
2457 for name, ns in repo.names.items():
2462 for name, ns in repo.names.items():
2458 if name != b'branches':
2463 if name != b'branches':
2459 names.update(ns.listnames(repo))
2464 names.update(ns.listnames(repo))
2460 names.update(
2465 names.update(
2461 tag
2466 tag
2462 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2467 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2463 if not closed
2468 if not closed
2464 )
2469 )
2465 completions = set()
2470 completions = set()
2466 if not args:
2471 if not args:
2467 args = [b'']
2472 args = [b'']
2468 for a in args:
2473 for a in args:
2469 completions.update(n for n in names if n.startswith(a))
2474 completions.update(n for n in names if n.startswith(a))
2470 ui.write(b'\n'.join(sorted(completions)))
2475 ui.write(b'\n'.join(sorted(completions)))
2471 ui.write(b'\n')
2476 ui.write(b'\n')
2472
2477
2473
2478
2474 @command(
2479 @command(
2475 b'debugnodemap',
2480 b'debugnodemap',
2476 [
2481 [
2477 (
2482 (
2478 b'',
2483 b'',
2479 b'dump-new',
2484 b'dump-new',
2480 False,
2485 False,
2481 _(b'write a (new) persistent binary nodemap on stdout'),
2486 _(b'write a (new) persistent binary nodemap on stdout'),
2482 ),
2487 ),
2483 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2488 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2484 (
2489 (
2485 b'',
2490 b'',
2486 b'check',
2491 b'check',
2487 False,
2492 False,
2488 _(b'check that the data on disk data are correct.'),
2493 _(b'check that the data on disk data are correct.'),
2489 ),
2494 ),
2490 (
2495 (
2491 b'',
2496 b'',
2492 b'metadata',
2497 b'metadata',
2493 False,
2498 False,
2494 _(b'display the on disk meta data for the nodemap'),
2499 _(b'display the on disk meta data for the nodemap'),
2495 ),
2500 ),
2496 ],
2501 ],
2497 )
2502 )
2498 def debugnodemap(ui, repo, **opts):
2503 def debugnodemap(ui, repo, **opts):
2499 """write and inspect on disk nodemap"""
2504 """write and inspect on disk nodemap"""
2500 if opts['dump_new']:
2505 if opts['dump_new']:
2501 unfi = repo.unfiltered()
2506 unfi = repo.unfiltered()
2502 cl = unfi.changelog
2507 cl = unfi.changelog
2503 if util.safehasattr(cl.index, "nodemap_data_all"):
2508 if util.safehasattr(cl.index, "nodemap_data_all"):
2504 data = cl.index.nodemap_data_all()
2509 data = cl.index.nodemap_data_all()
2505 else:
2510 else:
2506 data = nodemap.persistent_data(cl.index)
2511 data = nodemap.persistent_data(cl.index)
2507 ui.write(data)
2512 ui.write(data)
2508 elif opts['dump_disk']:
2513 elif opts['dump_disk']:
2509 unfi = repo.unfiltered()
2514 unfi = repo.unfiltered()
2510 cl = unfi.changelog
2515 cl = unfi.changelog
2511 nm_data = nodemap.persisted_data(cl)
2516 nm_data = nodemap.persisted_data(cl)
2512 if nm_data is not None:
2517 if nm_data is not None:
2513 docket, data = nm_data
2518 docket, data = nm_data
2514 ui.write(data[:])
2519 ui.write(data[:])
2515 elif opts['check']:
2520 elif opts['check']:
2516 unfi = repo.unfiltered()
2521 unfi = repo.unfiltered()
2517 cl = unfi.changelog
2522 cl = unfi.changelog
2518 nm_data = nodemap.persisted_data(cl)
2523 nm_data = nodemap.persisted_data(cl)
2519 if nm_data is not None:
2524 if nm_data is not None:
2520 docket, data = nm_data
2525 docket, data = nm_data
2521 return nodemap.check_data(ui, cl.index, data)
2526 return nodemap.check_data(ui, cl.index, data)
2522 elif opts['metadata']:
2527 elif opts['metadata']:
2523 unfi = repo.unfiltered()
2528 unfi = repo.unfiltered()
2524 cl = unfi.changelog
2529 cl = unfi.changelog
2525 nm_data = nodemap.persisted_data(cl)
2530 nm_data = nodemap.persisted_data(cl)
2526 if nm_data is not None:
2531 if nm_data is not None:
2527 docket, data = nm_data
2532 docket, data = nm_data
2528 ui.write((b"uid: %s\n") % docket.uid)
2533 ui.write((b"uid: %s\n") % docket.uid)
2529 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2534 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2530 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2535 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2531 ui.write((b"data-length: %d\n") % docket.data_length)
2536 ui.write((b"data-length: %d\n") % docket.data_length)
2532 ui.write((b"data-unused: %d\n") % docket.data_unused)
2537 ui.write((b"data-unused: %d\n") % docket.data_unused)
2533 unused_perc = docket.data_unused * 100.0 / docket.data_length
2538 unused_perc = docket.data_unused * 100.0 / docket.data_length
2534 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2539 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2535
2540
2536
2541
2537 @command(
2542 @command(
2538 b'debugobsolete',
2543 b'debugobsolete',
2539 [
2544 [
2540 (b'', b'flags', 0, _(b'markers flag')),
2545 (b'', b'flags', 0, _(b'markers flag')),
2541 (
2546 (
2542 b'',
2547 b'',
2543 b'record-parents',
2548 b'record-parents',
2544 False,
2549 False,
2545 _(b'record parent information for the precursor'),
2550 _(b'record parent information for the precursor'),
2546 ),
2551 ),
2547 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2552 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2548 (
2553 (
2549 b'',
2554 b'',
2550 b'exclusive',
2555 b'exclusive',
2551 False,
2556 False,
2552 _(b'restrict display to markers only relevant to REV'),
2557 _(b'restrict display to markers only relevant to REV'),
2553 ),
2558 ),
2554 (b'', b'index', False, _(b'display index of the marker')),
2559 (b'', b'index', False, _(b'display index of the marker')),
2555 (b'', b'delete', [], _(b'delete markers specified by indices')),
2560 (b'', b'delete', [], _(b'delete markers specified by indices')),
2556 ]
2561 ]
2557 + cmdutil.commitopts2
2562 + cmdutil.commitopts2
2558 + cmdutil.formatteropts,
2563 + cmdutil.formatteropts,
2559 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2564 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2560 )
2565 )
2561 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2566 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2562 """create arbitrary obsolete marker
2567 """create arbitrary obsolete marker
2563
2568
2564 With no arguments, displays the list of obsolescence markers."""
2569 With no arguments, displays the list of obsolescence markers."""
2565
2570
2566 opts = pycompat.byteskwargs(opts)
2571 opts = pycompat.byteskwargs(opts)
2567
2572
2568 def parsenodeid(s):
2573 def parsenodeid(s):
2569 try:
2574 try:
2570 # We do not use revsingle/revrange functions here to accept
2575 # We do not use revsingle/revrange functions here to accept
2571 # arbitrary node identifiers, possibly not present in the
2576 # arbitrary node identifiers, possibly not present in the
2572 # local repository.
2577 # local repository.
2573 n = bin(s)
2578 n = bin(s)
2574 if len(n) != repo.nodeconstants.nodelen:
2579 if len(n) != repo.nodeconstants.nodelen:
2575 raise TypeError()
2580 raise TypeError()
2576 return n
2581 return n
2577 except TypeError:
2582 except TypeError:
2578 raise error.InputError(
2583 raise error.InputError(
2579 b'changeset references must be full hexadecimal '
2584 b'changeset references must be full hexadecimal '
2580 b'node identifiers'
2585 b'node identifiers'
2581 )
2586 )
2582
2587
2583 if opts.get(b'delete'):
2588 if opts.get(b'delete'):
2584 indices = []
2589 indices = []
2585 for v in opts.get(b'delete'):
2590 for v in opts.get(b'delete'):
2586 try:
2591 try:
2587 indices.append(int(v))
2592 indices.append(int(v))
2588 except ValueError:
2593 except ValueError:
2589 raise error.InputError(
2594 raise error.InputError(
2590 _(b'invalid index value: %r') % v,
2595 _(b'invalid index value: %r') % v,
2591 hint=_(b'use integers for indices'),
2596 hint=_(b'use integers for indices'),
2592 )
2597 )
2593
2598
2594 if repo.currenttransaction():
2599 if repo.currenttransaction():
2595 raise error.Abort(
2600 raise error.Abort(
2596 _(b'cannot delete obsmarkers in the middle of transaction.')
2601 _(b'cannot delete obsmarkers in the middle of transaction.')
2597 )
2602 )
2598
2603
2599 with repo.lock():
2604 with repo.lock():
2600 n = repair.deleteobsmarkers(repo.obsstore, indices)
2605 n = repair.deleteobsmarkers(repo.obsstore, indices)
2601 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2606 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2602
2607
2603 return
2608 return
2604
2609
2605 if precursor is not None:
2610 if precursor is not None:
2606 if opts[b'rev']:
2611 if opts[b'rev']:
2607 raise error.InputError(
2612 raise error.InputError(
2608 b'cannot select revision when creating marker'
2613 b'cannot select revision when creating marker'
2609 )
2614 )
2610 metadata = {}
2615 metadata = {}
2611 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2616 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2612 succs = tuple(parsenodeid(succ) for succ in successors)
2617 succs = tuple(parsenodeid(succ) for succ in successors)
2613 l = repo.lock()
2618 l = repo.lock()
2614 try:
2619 try:
2615 tr = repo.transaction(b'debugobsolete')
2620 tr = repo.transaction(b'debugobsolete')
2616 try:
2621 try:
2617 date = opts.get(b'date')
2622 date = opts.get(b'date')
2618 if date:
2623 if date:
2619 date = dateutil.parsedate(date)
2624 date = dateutil.parsedate(date)
2620 else:
2625 else:
2621 date = None
2626 date = None
2622 prec = parsenodeid(precursor)
2627 prec = parsenodeid(precursor)
2623 parents = None
2628 parents = None
2624 if opts[b'record_parents']:
2629 if opts[b'record_parents']:
2625 if prec not in repo.unfiltered():
2630 if prec not in repo.unfiltered():
2626 raise error.Abort(
2631 raise error.Abort(
2627 b'cannot used --record-parents on '
2632 b'cannot used --record-parents on '
2628 b'unknown changesets'
2633 b'unknown changesets'
2629 )
2634 )
2630 parents = repo.unfiltered()[prec].parents()
2635 parents = repo.unfiltered()[prec].parents()
2631 parents = tuple(p.node() for p in parents)
2636 parents = tuple(p.node() for p in parents)
2632 repo.obsstore.create(
2637 repo.obsstore.create(
2633 tr,
2638 tr,
2634 prec,
2639 prec,
2635 succs,
2640 succs,
2636 opts[b'flags'],
2641 opts[b'flags'],
2637 parents=parents,
2642 parents=parents,
2638 date=date,
2643 date=date,
2639 metadata=metadata,
2644 metadata=metadata,
2640 ui=ui,
2645 ui=ui,
2641 )
2646 )
2642 tr.close()
2647 tr.close()
2643 except ValueError as exc:
2648 except ValueError as exc:
2644 raise error.Abort(
2649 raise error.Abort(
2645 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2650 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2646 )
2651 )
2647 finally:
2652 finally:
2648 tr.release()
2653 tr.release()
2649 finally:
2654 finally:
2650 l.release()
2655 l.release()
2651 else:
2656 else:
2652 if opts[b'rev']:
2657 if opts[b'rev']:
2653 revs = logcmdutil.revrange(repo, opts[b'rev'])
2658 revs = logcmdutil.revrange(repo, opts[b'rev'])
2654 nodes = [repo[r].node() for r in revs]
2659 nodes = [repo[r].node() for r in revs]
2655 markers = list(
2660 markers = list(
2656 obsutil.getmarkers(
2661 obsutil.getmarkers(
2657 repo, nodes=nodes, exclusive=opts[b'exclusive']
2662 repo, nodes=nodes, exclusive=opts[b'exclusive']
2658 )
2663 )
2659 )
2664 )
2660 markers.sort(key=lambda x: x._data)
2665 markers.sort(key=lambda x: x._data)
2661 else:
2666 else:
2662 markers = obsutil.getmarkers(repo)
2667 markers = obsutil.getmarkers(repo)
2663
2668
2664 markerstoiter = markers
2669 markerstoiter = markers
2665 isrelevant = lambda m: True
2670 isrelevant = lambda m: True
2666 if opts.get(b'rev') and opts.get(b'index'):
2671 if opts.get(b'rev') and opts.get(b'index'):
2667 markerstoiter = obsutil.getmarkers(repo)
2672 markerstoiter = obsutil.getmarkers(repo)
2668 markerset = set(markers)
2673 markerset = set(markers)
2669 isrelevant = lambda m: m in markerset
2674 isrelevant = lambda m: m in markerset
2670
2675
2671 fm = ui.formatter(b'debugobsolete', opts)
2676 fm = ui.formatter(b'debugobsolete', opts)
2672 for i, m in enumerate(markerstoiter):
2677 for i, m in enumerate(markerstoiter):
2673 if not isrelevant(m):
2678 if not isrelevant(m):
2674 # marker can be irrelevant when we're iterating over a set
2679 # marker can be irrelevant when we're iterating over a set
2675 # of markers (markerstoiter) which is bigger than the set
2680 # of markers (markerstoiter) which is bigger than the set
2676 # of markers we want to display (markers)
2681 # of markers we want to display (markers)
2677 # this can happen if both --index and --rev options are
2682 # this can happen if both --index and --rev options are
2678 # provided and thus we need to iterate over all of the markers
2683 # provided and thus we need to iterate over all of the markers
2679 # to get the correct indices, but only display the ones that
2684 # to get the correct indices, but only display the ones that
2680 # are relevant to --rev value
2685 # are relevant to --rev value
2681 continue
2686 continue
2682 fm.startitem()
2687 fm.startitem()
2683 ind = i if opts.get(b'index') else None
2688 ind = i if opts.get(b'index') else None
2684 cmdutil.showmarker(fm, m, index=ind)
2689 cmdutil.showmarker(fm, m, index=ind)
2685 fm.end()
2690 fm.end()
2686
2691
2687
2692
2688 @command(
2693 @command(
2689 b'debugp1copies',
2694 b'debugp1copies',
2690 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2695 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2691 _(b'[-r REV]'),
2696 _(b'[-r REV]'),
2692 )
2697 )
2693 def debugp1copies(ui, repo, **opts):
2698 def debugp1copies(ui, repo, **opts):
2694 """dump copy information compared to p1"""
2699 """dump copy information compared to p1"""
2695
2700
2696 opts = pycompat.byteskwargs(opts)
2701 opts = pycompat.byteskwargs(opts)
2697 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2702 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2698 for dst, src in ctx.p1copies().items():
2703 for dst, src in ctx.p1copies().items():
2699 ui.write(b'%s -> %s\n' % (src, dst))
2704 ui.write(b'%s -> %s\n' % (src, dst))
2700
2705
2701
2706
2702 @command(
2707 @command(
2703 b'debugp2copies',
2708 b'debugp2copies',
2704 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2709 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2705 _(b'[-r REV]'),
2710 _(b'[-r REV]'),
2706 )
2711 )
2707 def debugp1copies(ui, repo, **opts):
2712 def debugp1copies(ui, repo, **opts):
2708 """dump copy information compared to p2"""
2713 """dump copy information compared to p2"""
2709
2714
2710 opts = pycompat.byteskwargs(opts)
2715 opts = pycompat.byteskwargs(opts)
2711 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2716 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2712 for dst, src in ctx.p2copies().items():
2717 for dst, src in ctx.p2copies().items():
2713 ui.write(b'%s -> %s\n' % (src, dst))
2718 ui.write(b'%s -> %s\n' % (src, dst))
2714
2719
2715
2720
2716 @command(
2721 @command(
2717 b'debugpathcomplete',
2722 b'debugpathcomplete',
2718 [
2723 [
2719 (b'f', b'full', None, _(b'complete an entire path')),
2724 (b'f', b'full', None, _(b'complete an entire path')),
2720 (b'n', b'normal', None, _(b'show only normal files')),
2725 (b'n', b'normal', None, _(b'show only normal files')),
2721 (b'a', b'added', None, _(b'show only added files')),
2726 (b'a', b'added', None, _(b'show only added files')),
2722 (b'r', b'removed', None, _(b'show only removed files')),
2727 (b'r', b'removed', None, _(b'show only removed files')),
2723 ],
2728 ],
2724 _(b'FILESPEC...'),
2729 _(b'FILESPEC...'),
2725 )
2730 )
2726 def debugpathcomplete(ui, repo, *specs, **opts):
2731 def debugpathcomplete(ui, repo, *specs, **opts):
2727 """complete part or all of a tracked path
2732 """complete part or all of a tracked path
2728
2733
2729 This command supports shells that offer path name completion. It
2734 This command supports shells that offer path name completion. It
2730 currently completes only files already known to the dirstate.
2735 currently completes only files already known to the dirstate.
2731
2736
2732 Completion extends only to the next path segment unless
2737 Completion extends only to the next path segment unless
2733 --full is specified, in which case entire paths are used."""
2738 --full is specified, in which case entire paths are used."""
2734
2739
2735 def complete(path, acceptable):
2740 def complete(path, acceptable):
2736 dirstate = repo.dirstate
2741 dirstate = repo.dirstate
2737 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2742 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2738 rootdir = repo.root + pycompat.ossep
2743 rootdir = repo.root + pycompat.ossep
2739 if spec != repo.root and not spec.startswith(rootdir):
2744 if spec != repo.root and not spec.startswith(rootdir):
2740 return [], []
2745 return [], []
2741 if os.path.isdir(spec):
2746 if os.path.isdir(spec):
2742 spec += b'/'
2747 spec += b'/'
2743 spec = spec[len(rootdir) :]
2748 spec = spec[len(rootdir) :]
2744 fixpaths = pycompat.ossep != b'/'
2749 fixpaths = pycompat.ossep != b'/'
2745 if fixpaths:
2750 if fixpaths:
2746 spec = spec.replace(pycompat.ossep, b'/')
2751 spec = spec.replace(pycompat.ossep, b'/')
2747 speclen = len(spec)
2752 speclen = len(spec)
2748 fullpaths = opts['full']
2753 fullpaths = opts['full']
2749 files, dirs = set(), set()
2754 files, dirs = set(), set()
2750 adddir, addfile = dirs.add, files.add
2755 adddir, addfile = dirs.add, files.add
2751 for f, st in dirstate.items():
2756 for f, st in dirstate.items():
2752 if f.startswith(spec) and st.state in acceptable:
2757 if f.startswith(spec) and st.state in acceptable:
2753 if fixpaths:
2758 if fixpaths:
2754 f = f.replace(b'/', pycompat.ossep)
2759 f = f.replace(b'/', pycompat.ossep)
2755 if fullpaths:
2760 if fullpaths:
2756 addfile(f)
2761 addfile(f)
2757 continue
2762 continue
2758 s = f.find(pycompat.ossep, speclen)
2763 s = f.find(pycompat.ossep, speclen)
2759 if s >= 0:
2764 if s >= 0:
2760 adddir(f[:s])
2765 adddir(f[:s])
2761 else:
2766 else:
2762 addfile(f)
2767 addfile(f)
2763 return files, dirs
2768 return files, dirs
2764
2769
2765 acceptable = b''
2770 acceptable = b''
2766 if opts['normal']:
2771 if opts['normal']:
2767 acceptable += b'nm'
2772 acceptable += b'nm'
2768 if opts['added']:
2773 if opts['added']:
2769 acceptable += b'a'
2774 acceptable += b'a'
2770 if opts['removed']:
2775 if opts['removed']:
2771 acceptable += b'r'
2776 acceptable += b'r'
2772 cwd = repo.getcwd()
2777 cwd = repo.getcwd()
2773 if not specs:
2778 if not specs:
2774 specs = [b'.']
2779 specs = [b'.']
2775
2780
2776 files, dirs = set(), set()
2781 files, dirs = set(), set()
2777 for spec in specs:
2782 for spec in specs:
2778 f, d = complete(spec, acceptable or b'nmar')
2783 f, d = complete(spec, acceptable or b'nmar')
2779 files.update(f)
2784 files.update(f)
2780 dirs.update(d)
2785 dirs.update(d)
2781 files.update(dirs)
2786 files.update(dirs)
2782 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2787 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2783 ui.write(b'\n')
2788 ui.write(b'\n')
2784
2789
2785
2790
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    # Match against the *source* context; only copies whose destination
    # matches the given patterns are reported.
    m = scmutil.match(ctx1, pats, opts)
    # Sort by destination path for deterministic output.
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2799
2804
2800
2805
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # Close the peer even if any of the queries above raised.
        peer.close()
2824
2829
2825
2830
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress tool-selection chatter unless --debug is in effect;
            # only the final "FILE = TOOL" line is interesting by default.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2910
2915
2911
2916
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # pushkey returns a truthy value on success; invert for the
            # conventional 0-on-success exit code.
            return not r
        else:
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
2947
2952
2948
2953
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display the parent vectors of two revisions and their relation"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # NOTE(review): assumes one of the four relations below always holds;
    # if none does, `rel` would be unbound — confirm against pvec semantics.
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2975
2980
2976
2981
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            # Files tracked only in the dirstate but not marked as added
            # are the inconsistent ones that need rebuilding.
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3024
3029
3025
3030
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    opts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3042
3047
3043
3048
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() returns (source path, source filenode) or False/None
        # when the file was not copied/renamed in this revision.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3063
3068
3064
3069
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    for r in sorted(repo.requirements):
        ui.write(b"%s\n" % r)
3070
3075
3071
3076
3072 @command(
3077 @command(
3073 b'debugrevlog',
3078 b'debugrevlog',
3074 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3079 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3075 _(b'-c|-m|FILE'),
3080 _(b'-c|-m|FILE'),
3076 optionalrepo=True,
3081 optionalrepo=True,
3077 )
3082 )
3078 def debugrevlog(ui, repo, file_=None, **opts):
3083 def debugrevlog(ui, repo, file_=None, **opts):
3079 """show data and statistics about a revlog"""
3084 """show data and statistics about a revlog"""
3080 opts = pycompat.byteskwargs(opts)
3085 opts = pycompat.byteskwargs(opts)
3081 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3086 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3082
3087
3083 if opts.get(b"dump"):
3088 if opts.get(b"dump"):
3084 numrevs = len(r)
3089 numrevs = len(r)
3085 ui.write(
3090 ui.write(
3086 (
3091 (
3087 b"# rev p1rev p2rev start end deltastart base p1 p2"
3092 b"# rev p1rev p2rev start end deltastart base p1 p2"
3088 b" rawsize totalsize compression heads chainlen\n"
3093 b" rawsize totalsize compression heads chainlen\n"
3089 )
3094 )
3090 )
3095 )
3091 ts = 0
3096 ts = 0
3092 heads = set()
3097 heads = set()
3093
3098
3094 for rev in pycompat.xrange(numrevs):
3099 for rev in pycompat.xrange(numrevs):
3095 dbase = r.deltaparent(rev)
3100 dbase = r.deltaparent(rev)
3096 if dbase == -1:
3101 if dbase == -1:
3097 dbase = rev
3102 dbase = rev
3098 cbase = r.chainbase(rev)
3103 cbase = r.chainbase(rev)
3099 clen = r.chainlen(rev)
3104 clen = r.chainlen(rev)
3100 p1, p2 = r.parentrevs(rev)
3105 p1, p2 = r.parentrevs(rev)
3101 rs = r.rawsize(rev)
3106 rs = r.rawsize(rev)
3102 ts = ts + rs
3107 ts = ts + rs
3103 heads -= set(r.parentrevs(rev))
3108 heads -= set(r.parentrevs(rev))
3104 heads.add(rev)
3109 heads.add(rev)
3105 try:
3110 try:
3106 compression = ts / r.end(rev)
3111 compression = ts / r.end(rev)
3107 except ZeroDivisionError:
3112 except ZeroDivisionError:
3108 compression = 0
3113 compression = 0
3109 ui.write(
3114 ui.write(
3110 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3115 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3111 b"%11d %5d %8d\n"
3116 b"%11d %5d %8d\n"
3112 % (
3117 % (
3113 rev,
3118 rev,
3114 p1,
3119 p1,
3115 p2,
3120 p2,
3116 r.start(rev),
3121 r.start(rev),
3117 r.end(rev),
3122 r.end(rev),
3118 r.start(dbase),
3123 r.start(dbase),
3119 r.start(cbase),
3124 r.start(cbase),
3120 r.start(p1),
3125 r.start(p1),
3121 r.start(p2),
3126 r.start(p2),
3122 rs,
3127 rs,
3123 ts,
3128 ts,
3124 compression,
3129 compression,
3125 len(heads),
3130 len(heads),
3126 clen,
3131 clen,
3127 )
3132 )
3128 )
3133 )
3129 return 0
3134 return 0
3130
3135
3131 format = r._format_version
3136 format = r._format_version
3132 v = r._format_flags
3137 v = r._format_flags
3133 flags = []
3138 flags = []
3134 gdelta = False
3139 gdelta = False
3135 if v & revlog.FLAG_INLINE_DATA:
3140 if v & revlog.FLAG_INLINE_DATA:
3136 flags.append(b'inline')
3141 flags.append(b'inline')
3137 if v & revlog.FLAG_GENERALDELTA:
3142 if v & revlog.FLAG_GENERALDELTA:
3138 gdelta = True
3143 gdelta = True
3139 flags.append(b'generaldelta')
3144 flags.append(b'generaldelta')
3140 if not flags:
3145 if not flags:
3141 flags = [b'(none)']
3146 flags = [b'(none)']
3142
3147
3143 ### tracks merge vs single parent
3148 ### tracks merge vs single parent
3144 nummerges = 0
3149 nummerges = 0
3145
3150
3146 ### tracks ways the "delta" are build
3151 ### tracks ways the "delta" are build
3147 # nodelta
3152 # nodelta
3148 numempty = 0
3153 numempty = 0
3149 numemptytext = 0
3154 numemptytext = 0
3150 numemptydelta = 0
3155 numemptydelta = 0
3151 # full file content
3156 # full file content
3152 numfull = 0
3157 numfull = 0
3153 # intermediate snapshot against a prior snapshot
3158 # intermediate snapshot against a prior snapshot
3154 numsemi = 0
3159 numsemi = 0
3155 # snapshot count per depth
3160 # snapshot count per depth
3156 numsnapdepth = collections.defaultdict(lambda: 0)
3161 numsnapdepth = collections.defaultdict(lambda: 0)
3157 # delta against previous revision
3162 # delta against previous revision
3158 numprev = 0
3163 numprev = 0
3159 # delta against first or second parent (not prev)
3164 # delta against first or second parent (not prev)
3160 nump1 = 0
3165 nump1 = 0
3161 nump2 = 0
3166 nump2 = 0
3162 # delta against neither prev nor parents
3167 # delta against neither prev nor parents
3163 numother = 0
3168 numother = 0
3164 # delta against prev that are also first or second parent
3169 # delta against prev that are also first or second parent
3165 # (details of `numprev`)
3170 # (details of `numprev`)
3166 nump1prev = 0
3171 nump1prev = 0
3167 nump2prev = 0
3172 nump2prev = 0
3168
3173
3169 # data about delta chain of each revs
3174 # data about delta chain of each revs
3170 chainlengths = []
3175 chainlengths = []
3171 chainbases = []
3176 chainbases = []
3172 chainspans = []
3177 chainspans = []
3173
3178
3174 # data about each revision
3179 # data about each revision
3175 datasize = [None, 0, 0]
3180 datasize = [None, 0, 0]
3176 fullsize = [None, 0, 0]
3181 fullsize = [None, 0, 0]
3177 semisize = [None, 0, 0]
3182 semisize = [None, 0, 0]
3178 # snapshot count per depth
3183 # snapshot count per depth
3179 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3184 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3180 deltasize = [None, 0, 0]
3185 deltasize = [None, 0, 0]
3181 chunktypecounts = {}
3186 chunktypecounts = {}
3182 chunktypesizes = {}
3187 chunktypesizes = {}
3183
3188
3184 def addsize(size, l):
3189 def addsize(size, l):
3185 if l[0] is None or size < l[0]:
3190 if l[0] is None or size < l[0]:
3186 l[0] = size
3191 l[0] = size
3187 if size > l[1]:
3192 if size > l[1]:
3188 l[1] = size
3193 l[1] = size
3189 l[2] += size
3194 l[2] += size
3190
3195
3191 numrevs = len(r)
3196 numrevs = len(r)
3192 for rev in pycompat.xrange(numrevs):
3197 for rev in pycompat.xrange(numrevs):
3193 p1, p2 = r.parentrevs(rev)
3198 p1, p2 = r.parentrevs(rev)
3194 delta = r.deltaparent(rev)
3199 delta = r.deltaparent(rev)
3195 if format > 0:
3200 if format > 0:
3196 addsize(r.rawsize(rev), datasize)
3201 addsize(r.rawsize(rev), datasize)
3197 if p2 != nullrev:
3202 if p2 != nullrev:
3198 nummerges += 1
3203 nummerges += 1
3199 size = r.length(rev)
3204 size = r.length(rev)
3200 if delta == nullrev:
3205 if delta == nullrev:
3201 chainlengths.append(0)
3206 chainlengths.append(0)
3202 chainbases.append(r.start(rev))
3207 chainbases.append(r.start(rev))
3203 chainspans.append(size)
3208 chainspans.append(size)
3204 if size == 0:
3209 if size == 0:
3205 numempty += 1
3210 numempty += 1
3206 numemptytext += 1
3211 numemptytext += 1
3207 else:
3212 else:
3208 numfull += 1
3213 numfull += 1
3209 numsnapdepth[0] += 1
3214 numsnapdepth[0] += 1
3210 addsize(size, fullsize)
3215 addsize(size, fullsize)
3211 addsize(size, snapsizedepth[0])
3216 addsize(size, snapsizedepth[0])
3212 else:
3217 else:
3213 chainlengths.append(chainlengths[delta] + 1)
3218 chainlengths.append(chainlengths[delta] + 1)
3214 baseaddr = chainbases[delta]
3219 baseaddr = chainbases[delta]
3215 revaddr = r.start(rev)
3220 revaddr = r.start(rev)
3216 chainbases.append(baseaddr)
3221 chainbases.append(baseaddr)
3217 chainspans.append((revaddr - baseaddr) + size)
3222 chainspans.append((revaddr - baseaddr) + size)
3218 if size == 0:
3223 if size == 0:
3219 numempty += 1
3224 numempty += 1
3220 numemptydelta += 1
3225 numemptydelta += 1
3221 elif r.issnapshot(rev):
3226 elif r.issnapshot(rev):
3222 addsize(size, semisize)
3227 addsize(size, semisize)
3223 numsemi += 1
3228 numsemi += 1
3224 depth = r.snapshotdepth(rev)
3229 depth = r.snapshotdepth(rev)
3225 numsnapdepth[depth] += 1
3230 numsnapdepth[depth] += 1
3226 addsize(size, snapsizedepth[depth])
3231 addsize(size, snapsizedepth[depth])
3227 else:
3232 else:
3228 addsize(size, deltasize)
3233 addsize(size, deltasize)
3229 if delta == rev - 1:
3234 if delta == rev - 1:
3230 numprev += 1
3235 numprev += 1
3231 if delta == p1:
3236 if delta == p1:
3232 nump1prev += 1
3237 nump1prev += 1
3233 elif delta == p2:
3238 elif delta == p2:
3234 nump2prev += 1
3239 nump2prev += 1
3235 elif delta == p1:
3240 elif delta == p1:
3236 nump1 += 1
3241 nump1 += 1
3237 elif delta == p2:
3242 elif delta == p2:
3238 nump2 += 1
3243 nump2 += 1
3239 elif delta != nullrev:
3244 elif delta != nullrev:
3240 numother += 1
3245 numother += 1
3241
3246
3242 # Obtain data on the raw chunks in the revlog.
3247 # Obtain data on the raw chunks in the revlog.
3243 if util.safehasattr(r, b'_getsegmentforrevs'):
3248 if util.safehasattr(r, b'_getsegmentforrevs'):
3244 segment = r._getsegmentforrevs(rev, rev)[1]
3249 segment = r._getsegmentforrevs(rev, rev)[1]
3245 else:
3250 else:
3246 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3251 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3247 if segment:
3252 if segment:
3248 chunktype = bytes(segment[0:1])
3253 chunktype = bytes(segment[0:1])
3249 else:
3254 else:
3250 chunktype = b'empty'
3255 chunktype = b'empty'
3251
3256
3252 if chunktype not in chunktypecounts:
3257 if chunktype not in chunktypecounts:
3253 chunktypecounts[chunktype] = 0
3258 chunktypecounts[chunktype] = 0
3254 chunktypesizes[chunktype] = 0
3259 chunktypesizes[chunktype] = 0
3255
3260
3256 chunktypecounts[chunktype] += 1
3261 chunktypecounts[chunktype] += 1
3257 chunktypesizes[chunktype] += size
3262 chunktypesizes[chunktype] += size
3258
3263
3259 # Adjust size min value for empty cases
3264 # Adjust size min value for empty cases
3260 for size in (datasize, fullsize, semisize, deltasize):
3265 for size in (datasize, fullsize, semisize, deltasize):
3261 if size[0] is None:
3266 if size[0] is None:
3262 size[0] = 0
3267 size[0] = 0
3263
3268
3264 numdeltas = numrevs - numfull - numempty - numsemi
3269 numdeltas = numrevs - numfull - numempty - numsemi
3265 numoprev = numprev - nump1prev - nump2prev
3270 numoprev = numprev - nump1prev - nump2prev
3266 totalrawsize = datasize[2]
3271 totalrawsize = datasize[2]
3267 datasize[2] /= numrevs
3272 datasize[2] /= numrevs
3268 fulltotal = fullsize[2]
3273 fulltotal = fullsize[2]
3269 if numfull == 0:
3274 if numfull == 0:
3270 fullsize[2] = 0
3275 fullsize[2] = 0
3271 else:
3276 else:
3272 fullsize[2] /= numfull
3277 fullsize[2] /= numfull
3273 semitotal = semisize[2]
3278 semitotal = semisize[2]
3274 snaptotal = {}
3279 snaptotal = {}
3275 if numsemi > 0:
3280 if numsemi > 0:
3276 semisize[2] /= numsemi
3281 semisize[2] /= numsemi
3277 for depth in snapsizedepth:
3282 for depth in snapsizedepth:
3278 snaptotal[depth] = snapsizedepth[depth][2]
3283 snaptotal[depth] = snapsizedepth[depth][2]
3279 snapsizedepth[depth][2] /= numsnapdepth[depth]
3284 snapsizedepth[depth][2] /= numsnapdepth[depth]
3280
3285
3281 deltatotal = deltasize[2]
3286 deltatotal = deltasize[2]
3282 if numdeltas > 0:
3287 if numdeltas > 0:
3283 deltasize[2] /= numdeltas
3288 deltasize[2] /= numdeltas
3284 totalsize = fulltotal + semitotal + deltatotal
3289 totalsize = fulltotal + semitotal + deltatotal
3285 avgchainlen = sum(chainlengths) / numrevs
3290 avgchainlen = sum(chainlengths) / numrevs
3286 maxchainlen = max(chainlengths)
3291 maxchainlen = max(chainlengths)
3287 maxchainspan = max(chainspans)
3292 maxchainspan = max(chainspans)
3288 compratio = 1
3293 compratio = 1
3289 if totalsize:
3294 if totalsize:
3290 compratio = totalrawsize / totalsize
3295 compratio = totalrawsize / totalsize
3291
3296
3292 basedfmtstr = b'%%%dd\n'
3297 basedfmtstr = b'%%%dd\n'
3293 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3298 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3294
3299
3295 def dfmtstr(max):
3300 def dfmtstr(max):
3296 return basedfmtstr % len(str(max))
3301 return basedfmtstr % len(str(max))
3297
3302
3298 def pcfmtstr(max, padding=0):
3303 def pcfmtstr(max, padding=0):
3299 return basepcfmtstr % (len(str(max)), b' ' * padding)
3304 return basepcfmtstr % (len(str(max)), b' ' * padding)
3300
3305
3301 def pcfmt(value, total):
3306 def pcfmt(value, total):
3302 if total:
3307 if total:
3303 return (value, 100 * float(value) / total)
3308 return (value, 100 * float(value) / total)
3304 else:
3309 else:
3305 return value, 100.0
3310 return value, 100.0
3306
3311
3307 ui.writenoi18n(b'format : %d\n' % format)
3312 ui.writenoi18n(b'format : %d\n' % format)
3308 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3313 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3309
3314
3310 ui.write(b'\n')
3315 ui.write(b'\n')
3311 fmt = pcfmtstr(totalsize)
3316 fmt = pcfmtstr(totalsize)
3312 fmt2 = dfmtstr(totalsize)
3317 fmt2 = dfmtstr(totalsize)
3313 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3318 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3314 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3319 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3315 ui.writenoi18n(
3320 ui.writenoi18n(
3316 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3321 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3317 )
3322 )
3318 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3323 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3319 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3324 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3320 ui.writenoi18n(
3325 ui.writenoi18n(
3321 b' text : '
3326 b' text : '
3322 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3327 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3323 )
3328 )
3324 ui.writenoi18n(
3329 ui.writenoi18n(
3325 b' delta : '
3330 b' delta : '
3326 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3331 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3327 )
3332 )
3328 ui.writenoi18n(
3333 ui.writenoi18n(
3329 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3334 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3330 )
3335 )
3331 for depth in sorted(numsnapdepth):
3336 for depth in sorted(numsnapdepth):
3332 ui.write(
3337 ui.write(
3333 (b' lvl-%-3d : ' % depth)
3338 (b' lvl-%-3d : ' % depth)
3334 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3339 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3335 )
3340 )
3336 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3341 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3337 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3342 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3338 ui.writenoi18n(
3343 ui.writenoi18n(
3339 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3344 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3340 )
3345 )
3341 for depth in sorted(numsnapdepth):
3346 for depth in sorted(numsnapdepth):
3342 ui.write(
3347 ui.write(
3343 (b' lvl-%-3d : ' % depth)
3348 (b' lvl-%-3d : ' % depth)
3344 + fmt % pcfmt(snaptotal[depth], totalsize)
3349 + fmt % pcfmt(snaptotal[depth], totalsize)
3345 )
3350 )
3346 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3351 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3347
3352
3348 def fmtchunktype(chunktype):
3353 def fmtchunktype(chunktype):
3349 if chunktype == b'empty':
3354 if chunktype == b'empty':
3350 return b' %s : ' % chunktype
3355 return b' %s : ' % chunktype
3351 elif chunktype in pycompat.bytestr(string.ascii_letters):
3356 elif chunktype in pycompat.bytestr(string.ascii_letters):
3352 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3357 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3353 else:
3358 else:
3354 return b' 0x%s : ' % hex(chunktype)
3359 return b' 0x%s : ' % hex(chunktype)
3355
3360
3356 ui.write(b'\n')
3361 ui.write(b'\n')
3357 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3362 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3358 for chunktype in sorted(chunktypecounts):
3363 for chunktype in sorted(chunktypecounts):
3359 ui.write(fmtchunktype(chunktype))
3364 ui.write(fmtchunktype(chunktype))
3360 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3365 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3361 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3366 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3362 for chunktype in sorted(chunktypecounts):
3367 for chunktype in sorted(chunktypecounts):
3363 ui.write(fmtchunktype(chunktype))
3368 ui.write(fmtchunktype(chunktype))
3364 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3369 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3365
3370
3366 ui.write(b'\n')
3371 ui.write(b'\n')
3367 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3372 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3368 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3373 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3369 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3374 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3370 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3375 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3371 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3376 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3372
3377
3373 if format > 0:
3378 if format > 0:
3374 ui.write(b'\n')
3379 ui.write(b'\n')
3375 ui.writenoi18n(
3380 ui.writenoi18n(
3376 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3381 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3377 % tuple(datasize)
3382 % tuple(datasize)
3378 )
3383 )
3379 ui.writenoi18n(
3384 ui.writenoi18n(
3380 b'full revision size (min/max/avg) : %d / %d / %d\n'
3385 b'full revision size (min/max/avg) : %d / %d / %d\n'
3381 % tuple(fullsize)
3386 % tuple(fullsize)
3382 )
3387 )
3383 ui.writenoi18n(
3388 ui.writenoi18n(
3384 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3389 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3385 % tuple(semisize)
3390 % tuple(semisize)
3386 )
3391 )
3387 for depth in sorted(snapsizedepth):
3392 for depth in sorted(snapsizedepth):
3388 if depth == 0:
3393 if depth == 0:
3389 continue
3394 continue
3390 ui.writenoi18n(
3395 ui.writenoi18n(
3391 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3396 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3392 % ((depth,) + tuple(snapsizedepth[depth]))
3397 % ((depth,) + tuple(snapsizedepth[depth]))
3393 )
3398 )
3394 ui.writenoi18n(
3399 ui.writenoi18n(
3395 b'delta size (min/max/avg) : %d / %d / %d\n'
3400 b'delta size (min/max/avg) : %d / %d / %d\n'
3396 % tuple(deltasize)
3401 % tuple(deltasize)
3397 )
3402 )
3398
3403
3399 if numdeltas > 0:
3404 if numdeltas > 0:
3400 ui.write(b'\n')
3405 ui.write(b'\n')
3401 fmt = pcfmtstr(numdeltas)
3406 fmt = pcfmtstr(numdeltas)
3402 fmt2 = pcfmtstr(numdeltas, 4)
3407 fmt2 = pcfmtstr(numdeltas, 4)
3403 ui.writenoi18n(
3408 ui.writenoi18n(
3404 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3409 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3405 )
3410 )
3406 if numprev > 0:
3411 if numprev > 0:
3407 ui.writenoi18n(
3412 ui.writenoi18n(
3408 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3413 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3409 )
3414 )
3410 ui.writenoi18n(
3415 ui.writenoi18n(
3411 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3416 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3412 )
3417 )
3413 ui.writenoi18n(
3418 ui.writenoi18n(
3414 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3419 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3415 )
3420 )
3416 if gdelta:
3421 if gdelta:
3417 ui.writenoi18n(
3422 ui.writenoi18n(
3418 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3423 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3419 )
3424 )
3420 ui.writenoi18n(
3425 ui.writenoi18n(
3421 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3426 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3422 )
3427 )
3423 ui.writenoi18n(
3428 ui.writenoi18n(
3424 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3429 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3425 )
3430 )
3426
3431
3427
3432
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full node hashes, otherwise the short form is used
    shortfn = hex if ui.debugflag else short

    # The revlog may be empty, so keep a sane default hash-column width;
    # otherwise size the column from the first revision's (short) node.
    idlen = 12
    for rev in r:
        idlen = len(shortfn(r.node(rev)))
        break

    # Emit the column header matching the requested format/verbosity.
    if format == 0:
        if ui.verbose:
            header = b"   rev    offset  length linkrev %s %s p2\n" % (
                b"nodeid".ljust(idlen),
                b"p1".ljust(idlen),
            )
        else:
            header = b"   rev linkrev %s %s p2\n" % (
                b"nodeid".ljust(idlen),
                b"p1".ljust(idlen),
            )
        ui.writenoi18n(header)
    elif format == 1:
        if ui.verbose:
            header = (
                b"   rev flag   offset   length     size   link     p1"
                b"     p2 %s\n"
            ) % b"nodeid".rjust(idlen)
        else:
            header = (
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )
        ui.writenoi18n(header)

    # One row per revision.
    for rev in r:
        node = r.node(rev)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the entry is unreadable
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                fields = (
                    rev,
                    r.start(rev),
                    r.length(rev),
                    r.linkrev(rev),
                    shortfn(node),
                    shortfn(pp[0]),
                    shortfn(pp[1]),
                )
                ui.write(b"% 6d % 9d % 7d % 7d %s %s %s\n" % fields)
            else:
                fields = (
                    rev,
                    r.linkrev(rev),
                    shortfn(node),
                    shortfn(pp[0]),
                    shortfn(pp[1]),
                )
                ui.write(b"% 6d % 7d %s %s %s\n" % fields)
        elif format == 1:
            pr = r.parentrevs(rev)
            if ui.verbose:
                fields = (
                    rev,
                    r.flags(rev),
                    r.start(rev),
                    r.length(rev),
                    r.rawsize(rev),
                    r.linkrev(rev),
                    pr[0],
                    pr[1],
                    shortfn(node),
                )
                ui.write(b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % fields)
            else:
                fields = (
                    rev,
                    r.flags(rev),
                    r.rawsize(rev),
                    r.linkrev(rev),
                    pr[0],
                    pr[1],
                    shortfn(node),
                )
                ui.write(b"% 6d %04x % 8d % 6d % 6d % 6d %s\n" % fields)
3541
3546
3542
3547
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Parsing pipeline: each stage transforms the tree produced by the
    # previous one. Order matters.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final (optimize) stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {stagename for stagename, transform in stages}

    # Which stages to print: unconditionally, or only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for stagename in opts[b'show_stage']:
            if stagename not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % stagename)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for stagename, transform in stages:
        treebystage[stagename] = tree = transform(tree)
        shouldshow = stagename in showalways or (
            stagename in showchanged and tree != printedtree
        )
        if shouldshow:
            if opts[b'show_stage'] or stagename != b'parsed':
                ui.write(b"* %s:\n" % stagename)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and compare.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-style diff of the two revision sequences.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        matcher = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in matcher.get_opcodes():
            if tag in ('delete', 'replace'):
                for rev in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % rev, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for rev in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % rev, label=b'diff.inserted')
            if tag == 'equal':
                for rev in arevs[alo:ahi]:
                    ui.write(b' %d\n' % rev)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for rev in revs:
        ui.write(b"%d\n" % rev)
3674
3679
3675
3680
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    # Open the I/O log destination, if any was requested.
    logfh = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3724
3729
3725
3730
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of these people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions up front; rev2 defaults to the null revision
    # when omitted.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parent pointers are rewritten; nothing else in the
    # working directory or repository is touched.
    with repo.wlock():
        repo.setparents(node1, node2)
3753
3758
3754
3759
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional argument is the revision, not a file.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # BUG FIX: previously reported as b'debugdata' (copy-paste from
            # the debugdata command), misattributing the usage error.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap filelog-style objects down to the underlying revlog, which is
    # where sidedata lives.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for stable, readable output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3781
3786
3782
3787
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # BUG FIX: ssl.wrap_socket() was deprecated in Python 3.7 and removed in
    # Python 3.12, so the previous code crashed on modern interpreters.
    # Build an equivalent unverified client context explicitly: we only need
    # the peer's raw certificate, not a validated connection.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # binary_form=True yields the DER certificate, which is what the
        # win32 chain-building helper expects.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3854
3859
3855
3860
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all strip-backup bundles, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize bundle/force so getremotechanges treats each backup file
    # as a plain incoming source.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from the bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # A bundle can be unreadable when its parent revisions were
            # stripped from the repo; warn and move on to the next one.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Comparing against the repo is noisy; silence the ui while
        # computing the incoming changesets of this bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Recovery mode: unbundle the first backup containing the
                # requested node, inside a lock + transaction.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the backup's mtime header, then its
                # changesets (full path only with --verbose).
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # Always release the temporary bundle repository resources.
            cleanupfn()
3996
4001
3997
4002
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state entries recorded for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # substate maps subrepo path -> (source, revision, kind); print the
    # entries in a stable, sorted order.
    for path, (source, revision, kind) in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source   %s\n' % source)
        ui.writenoi18n(b' revision %s\n' % revision)
4009
4014
4010
4015
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily: only needed when this debug command actually runs.
    import code

    code.interact(local={'ui': ui, 'repo': repo})
4026
4031
4027
4032
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # rendering helpers: changectx -> bytes label, node -> short hex
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        # one line of output per successors set; divergent changesets
        # therefore produce several lines under the same revision
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4082
4087
4083
4088
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        # Cache entries are either absent (None), a valid-looking fnode,
        # or an explicitly invalid record.
        if fnode is None:
            display = b'missing'
        elif fnode:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4102
4107
4103
4108
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Log-template mode needs a repository to resolve revisions.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties; 'ui' is
    # reserved and an empty key is invalid.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # With --verbose, dump the parsed tree, and the alias-expanded tree
        # if expansion changed anything.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4167
4172
4168
4173
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass may yield None (e.g. non-interactive default); substitute a
    # printable placeholder so the echoed line is always well formed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4183
4188
4184
4189
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4197
4202
4198
4203
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Both locks are required: cache updates may touch the store and the
    # working-directory state.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4204
4209
4205
4210
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All real work is delegated to the upgrade module; --optimize may be
    # repeated, so collapse the list into a set of optimization names.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4255
4260
4256
4261
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Display paths with forward slashes when ui.slash is set on platforms
    # whose native separator differs.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        fmtpath = util.normpath
    else:
        fmtpath = lambda fn: fn
    # Column widths are sized to the longest absolute and relative paths.
    # (Renamed from `abs`, which shadowed the builtin, and use generator
    # expressions instead of materializing throwaway lists.)
    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
        max(len(abspath) for abspath in items),
        max(len(repo.pathto(abspath)) for abspath in items),
    )
    for abspath in items:
        line = fmt % (
            abspath,
            fmtpath(repo.pathto(abspath)),
            m.exact(abspath) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4283
4288
4284
4289
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)" and keep
            # a trailing space so the reason text stays separated.
            rendered = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4302
4307
4303
4308
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """exercise wire-protocol argument passing against a peer"""
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # The remote-connection options are consumed by hg.peer(); strip
        # them before forwarding the rest over the wire.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Forward only the options that were actually set.
        args = {k: v for k, v in opts.items() if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4334
4339
4335
4340
4336 def _parsewirelangblocks(fh):
4341 def _parsewirelangblocks(fh):
4337 activeaction = None
4342 activeaction = None
4338 blocklines = []
4343 blocklines = []
4339 lastindent = 0
4344 lastindent = 0
4340
4345
4341 for line in fh:
4346 for line in fh:
4342 line = line.rstrip()
4347 line = line.rstrip()
4343 if not line:
4348 if not line:
4344 continue
4349 continue
4345
4350
4346 if line.startswith(b'#'):
4351 if line.startswith(b'#'):
4347 continue
4352 continue
4348
4353
4349 if not line.startswith(b' '):
4354 if not line.startswith(b' '):
4350 # New block. Flush previous one.
4355 # New block. Flush previous one.
4351 if activeaction:
4356 if activeaction:
4352 yield activeaction, blocklines
4357 yield activeaction, blocklines
4353
4358
4354 activeaction = line
4359 activeaction = line
4355 blocklines = []
4360 blocklines = []
4356 lastindent = 0
4361 lastindent = 0
4357 continue
4362 continue
4358
4363
4359 # Else we start with an indent.
4364 # Else we start with an indent.
4360
4365
4361 if not activeaction:
4366 if not activeaction:
4362 raise error.Abort(_(b'indented line outside of block'))
4367 raise error.Abort(_(b'indented line outside of block'))
4363
4368
4364 indent = len(line) - len(line.lstrip())
4369 indent = len(line) - len(line.lstrip())
4365
4370
4366 # If this line is indented more than the last line, concatenate it.
4371 # If this line is indented more than the last line, concatenate it.
4367 if indent > lastindent and blocklines:
4372 if indent > lastindent and blocklines:
4368 blocklines[-1] += line.lstrip()
4373 blocklines[-1] += line.lstrip()
4369 else:
4374 else:
4370 blocklines.append(line)
4375 blocklines.append(line)
4371 lastindent = indent
4376 lastindent = indent
4372
4377
4373 # Flush last block.
4378 # Flush last block.
4374 if activeaction:
4379 if activeaction:
4375 yield activeaction, blocklines
4380 yield activeaction, blocklines
4376
4381
4377
4382
4378 @command(
4383 @command(
4379 b'debugwireproto',
4384 b'debugwireproto',
4380 [
4385 [
4381 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4386 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4382 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4387 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4383 (
4388 (
4384 b'',
4389 b'',
4385 b'noreadstderr',
4390 b'noreadstderr',
4386 False,
4391 False,
4387 _(b'do not read from stderr of the remote'),
4392 _(b'do not read from stderr of the remote'),
4388 ),
4393 ),
4389 (
4394 (
4390 b'',
4395 b'',
4391 b'nologhandshake',
4396 b'nologhandshake',
4392 False,
4397 False,
4393 _(b'do not log I/O related to the peer handshake'),
4398 _(b'do not log I/O related to the peer handshake'),
4394 ),
4399 ),
4395 ]
4400 ]
4396 + cmdutil.remoteopts,
4401 + cmdutil.remoteopts,
4397 _(b'[PATH]'),
4402 _(b'[PATH]'),
4398 optionalrepo=True,
4403 optionalrepo=True,
4399 )
4404 )
4400 def debugwireproto(ui, repo, path=None, **opts):
4405 def debugwireproto(ui, repo, path=None, **opts):
4401 """send wire protocol commands to a server
4406 """send wire protocol commands to a server
4402
4407
4403 This command can be used to issue wire protocol commands to remote
4408 This command can be used to issue wire protocol commands to remote
4404 peers and to debug the raw data being exchanged.
4409 peers and to debug the raw data being exchanged.
4405
4410
4406 ``--localssh`` will start an SSH server against the current repository
4411 ``--localssh`` will start an SSH server against the current repository
4407 and connect to that. By default, the connection will perform a handshake
4412 and connect to that. By default, the connection will perform a handshake
4408 and establish an appropriate peer instance.
4413 and establish an appropriate peer instance.
4409
4414
4410 ``--peer`` can be used to bypass the handshake protocol and construct a
4415 ``--peer`` can be used to bypass the handshake protocol and construct a
4411 peer instance using the specified class type. Valid values are ``raw``,
4416 peer instance using the specified class type. Valid values are ``raw``,
4412 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4417 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4413 don't support higher-level command actions.
4418 don't support higher-level command actions.
4414
4419
4415 ``--noreadstderr`` can be used to disable automatic reading from stderr
4420 ``--noreadstderr`` can be used to disable automatic reading from stderr
4416 of the peer (for SSH connections only). Disabling automatic reading of
4421 of the peer (for SSH connections only). Disabling automatic reading of
4417 stderr is useful for making output more deterministic.
4422 stderr is useful for making output more deterministic.
4418
4423
4419 Commands are issued via a mini language which is specified via stdin.
4424 Commands are issued via a mini language which is specified via stdin.
4420 The language consists of individual actions to perform. An action is
4425 The language consists of individual actions to perform. An action is
4421 defined by a block. A block is defined as a line with no leading
4426 defined by a block. A block is defined as a line with no leading
4422 space followed by 0 or more lines with leading space. Blocks are
4427 space followed by 0 or more lines with leading space. Blocks are
4423 effectively a high-level command with additional metadata.
4428 effectively a high-level command with additional metadata.
4424
4429
4425 Lines beginning with ``#`` are ignored.
4430 Lines beginning with ``#`` are ignored.
4426
4431
4427 The following sections denote available actions.
4432 The following sections denote available actions.
4428
4433
4429 raw
4434 raw
4430 ---
4435 ---
4431
4436
4432 Send raw data to the server.
4437 Send raw data to the server.
4433
4438
4434 The block payload contains the raw data to send as one atomic send
4439 The block payload contains the raw data to send as one atomic send
4435 operation. The data may not actually be delivered in a single system
4440 operation. The data may not actually be delivered in a single system
4436 call: it depends on the abilities of the transport being used.
4441 call: it depends on the abilities of the transport being used.
4437
4442
4438 Each line in the block is de-indented and concatenated. Then, that
4443 Each line in the block is de-indented and concatenated. Then, that
4439 value is evaluated as a Python b'' literal. This allows the use of
4444 value is evaluated as a Python b'' literal. This allows the use of
4440 backslash escaping, etc.
4445 backslash escaping, etc.
4441
4446
4442 raw+
4447 raw+
4443 ----
4448 ----
4444
4449
4445 Behaves like ``raw`` except flushes output afterwards.
4450 Behaves like ``raw`` except flushes output afterwards.
4446
4451
4447 command <X>
4452 command <X>
4448 -----------
4453 -----------
4449
4454
4450 Send a request to run a named command, whose name follows the ``command``
4455 Send a request to run a named command, whose name follows the ``command``
4451 string.
4456 string.
4452
4457
4453 Arguments to the command are defined as lines in this block. The format of
4458 Arguments to the command are defined as lines in this block. The format of
4454 each line is ``<key> <value>``. e.g.::
4459 each line is ``<key> <value>``. e.g.::
4455
4460
4456 command listkeys
4461 command listkeys
4457 namespace bookmarks
4462 namespace bookmarks
4458
4463
4459 If the value begins with ``eval:``, it will be interpreted as a Python
4464 If the value begins with ``eval:``, it will be interpreted as a Python
4460 literal expression. Otherwise values are interpreted as Python b'' literals.
4465 literal expression. Otherwise values are interpreted as Python b'' literals.
4461 This allows sending complex types and encoding special byte sequences via
4466 This allows sending complex types and encoding special byte sequences via
4462 backslash escaping.
4467 backslash escaping.
4463
4468
4464 The following arguments have special meaning:
4469 The following arguments have special meaning:
4465
4470
4466 ``PUSHFILE``
4471 ``PUSHFILE``
4467 When defined, the *push* mechanism of the peer will be used instead
4472 When defined, the *push* mechanism of the peer will be used instead
4468 of the static request-response mechanism and the content of the
4473 of the static request-response mechanism and the content of the
4469 file specified in the value of this argument will be sent as the
4474 file specified in the value of this argument will be sent as the
4470 command payload.
4475 command payload.
4471
4476
4472 This can be used to submit a local bundle file to the remote.
4477 This can be used to submit a local bundle file to the remote.
4473
4478
4474 batchbegin
4479 batchbegin
4475 ----------
4480 ----------
4476
4481
4477 Instruct the peer to begin a batched send.
4482 Instruct the peer to begin a batched send.
4478
4483
4479 All ``command`` blocks are queued for execution until the next
4484 All ``command`` blocks are queued for execution until the next
4480 ``batchsubmit`` block.
4485 ``batchsubmit`` block.
4481
4486
4482 batchsubmit
4487 batchsubmit
4483 -----------
4488 -----------
4484
4489
4485 Submit previously queued ``command`` blocks as a batch request.
4490 Submit previously queued ``command`` blocks as a batch request.
4486
4491
4487 This action MUST be paired with a ``batchbegin`` action.
4492 This action MUST be paired with a ``batchbegin`` action.
4488
4493
4489 httprequest <method> <path>
4494 httprequest <method> <path>
4490 ---------------------------
4495 ---------------------------
4491
4496
4492 (HTTP peer only)
4497 (HTTP peer only)
4493
4498
4494 Send an HTTP request to the peer.
4499 Send an HTTP request to the peer.
4495
4500
4496 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4501 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4497
4502
4498 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4503 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4499 headers to add to the request. e.g. ``Accept: foo``.
4504 headers to add to the request. e.g. ``Accept: foo``.
4500
4505
4501 The following arguments are special:
4506 The following arguments are special:
4502
4507
4503 ``BODYFILE``
4508 ``BODYFILE``
4504 The content of the file defined as the value to this argument will be
4509 The content of the file defined as the value to this argument will be
4505 transferred verbatim as the HTTP request body.
4510 transferred verbatim as the HTTP request body.
4506
4511
4507 ``frame <type> <flags> <payload>``
4512 ``frame <type> <flags> <payload>``
4508 Send a unified protocol frame as part of the request body.
4513 Send a unified protocol frame as part of the request body.
4509
4514
4510 All frames will be collected and sent as the body to the HTTP
4515 All frames will be collected and sent as the body to the HTTP
4511 request.
4516 request.
4512
4517
4513 close
4518 close
4514 -----
4519 -----
4515
4520
4516 Close the connection to the server.
4521 Close the connection to the server.
4517
4522
4518 flush
4523 flush
4519 -----
4524 -----
4520
4525
4521 Flush data written to the server.
4526 Flush data written to the server.
4522
4527
4523 readavailable
4528 readavailable
4524 -------------
4529 -------------
4525
4530
4526 Close the write end of the connection and read all available data from
4531 Close the write end of the connection and read all available data from
4527 the server.
4532 the server.
4528
4533
4529 If the connection to the server encompasses multiple pipes, we poll both
4534 If the connection to the server encompasses multiple pipes, we poll both
4530 pipes and read available data.
4535 pipes and read available data.
4531
4536
4532 readline
4537 readline
4533 --------
4538 --------
4534
4539
4535 Read a line of output from the server. If there are multiple output
4540 Read a line of output from the server. If there are multiple output
4536 pipes, reads only the main pipe.
4541 pipes, reads only the main pipe.
4537
4542
4538 ereadline
4543 ereadline
4539 ---------
4544 ---------
4540
4545
4541 Like ``readline``, but read from the stderr pipe, if available.
4546 Like ``readline``, but read from the stderr pipe, if available.
4542
4547
4543 read <X>
4548 read <X>
4544 --------
4549 --------
4545
4550
4546 ``read()`` N bytes from the server's main output pipe.
4551 ``read()`` N bytes from the server's main output pipe.
4547
4552
4548 eread <X>
4553 eread <X>
4549 ---------
4554 ---------
4550
4555
4551 ``read()`` N bytes from the server's stderr pipe, if available.
4556 ``read()`` N bytes from the server's stderr pipe, if available.
4552
4557
4553 Specifying Unified Frame-Based Protocol Frames
4558 Specifying Unified Frame-Based Protocol Frames
4554 ----------------------------------------------
4559 ----------------------------------------------
4555
4560
4556 It is possible to emit a *Unified Frame-Based Protocol* by using special
4561 It is possible to emit a *Unified Frame-Based Protocol* by using special
4557 syntax.
4562 syntax.
4558
4563
4559 A frame is composed as a type, flags, and payload. These can be parsed
4564 A frame is composed as a type, flags, and payload. These can be parsed
4560 from a string of the form:
4565 from a string of the form:
4561
4566
4562 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4567 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4563
4568
4564 ``request-id`` and ``stream-id`` are integers defining the request and
4569 ``request-id`` and ``stream-id`` are integers defining the request and
4565 stream identifiers.
4570 stream identifiers.
4566
4571
4567 ``type`` can be an integer value for the frame type or the string name
4572 ``type`` can be an integer value for the frame type or the string name
4568 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4573 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4569 ``command-name``.
4574 ``command-name``.
4570
4575
4571 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4576 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4572 components. Each component (and there can be just one) can be an integer
4577 components. Each component (and there can be just one) can be an integer
4573 or a flag name for stream flags or frame flags, respectively. Values are
4578 or a flag name for stream flags or frame flags, respectively. Values are
4574 resolved to integers and then bitwise OR'd together.
4579 resolved to integers and then bitwise OR'd together.
4575
4580
4576 ``payload`` represents the raw frame payload. If it begins with
4581 ``payload`` represents the raw frame payload. If it begins with
4577 ``cbor:``, the following string is evaluated as Python code and the
4582 ``cbor:``, the following string is evaluated as Python code and the
4578 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4583 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4579 as a Python byte string literal.
4584 as a Python byte string literal.
4580 """
4585 """
4581 opts = pycompat.byteskwargs(opts)
4586 opts = pycompat.byteskwargs(opts)
4582
4587
4583 if opts[b'localssh'] and not repo:
4588 if opts[b'localssh'] and not repo:
4584 raise error.Abort(_(b'--localssh requires a repository'))
4589 raise error.Abort(_(b'--localssh requires a repository'))
4585
4590
4586 if opts[b'peer'] and opts[b'peer'] not in (
4591 if opts[b'peer'] and opts[b'peer'] not in (
4587 b'raw',
4592 b'raw',
4588 b'ssh1',
4593 b'ssh1',
4589 ):
4594 ):
4590 raise error.Abort(
4595 raise error.Abort(
4591 _(b'invalid value for --peer'),
4596 _(b'invalid value for --peer'),
4592 hint=_(b'valid values are "raw" and "ssh1"'),
4597 hint=_(b'valid values are "raw" and "ssh1"'),
4593 )
4598 )
4594
4599
4595 if path and opts[b'localssh']:
4600 if path and opts[b'localssh']:
4596 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4601 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4597
4602
4598 if ui.interactive():
4603 if ui.interactive():
4599 ui.write(_(b'(waiting for commands on stdin)\n'))
4604 ui.write(_(b'(waiting for commands on stdin)\n'))
4600
4605
4601 blocks = list(_parsewirelangblocks(ui.fin))
4606 blocks = list(_parsewirelangblocks(ui.fin))
4602
4607
4603 proc = None
4608 proc = None
4604 stdin = None
4609 stdin = None
4605 stdout = None
4610 stdout = None
4606 stderr = None
4611 stderr = None
4607 opener = None
4612 opener = None
4608
4613
4609 if opts[b'localssh']:
4614 if opts[b'localssh']:
4610 # We start the SSH server in its own process so there is process
4615 # We start the SSH server in its own process so there is process
4611 # separation. This prevents a whole class of potential bugs around
4616 # separation. This prevents a whole class of potential bugs around
4612 # shared state from interfering with server operation.
4617 # shared state from interfering with server operation.
4613 args = procutil.hgcmd() + [
4618 args = procutil.hgcmd() + [
4614 b'-R',
4619 b'-R',
4615 repo.root,
4620 repo.root,
4616 b'debugserve',
4621 b'debugserve',
4617 b'--sshstdio',
4622 b'--sshstdio',
4618 ]
4623 ]
4619 proc = subprocess.Popen(
4624 proc = subprocess.Popen(
4620 pycompat.rapply(procutil.tonativestr, args),
4625 pycompat.rapply(procutil.tonativestr, args),
4621 stdin=subprocess.PIPE,
4626 stdin=subprocess.PIPE,
4622 stdout=subprocess.PIPE,
4627 stdout=subprocess.PIPE,
4623 stderr=subprocess.PIPE,
4628 stderr=subprocess.PIPE,
4624 bufsize=0,
4629 bufsize=0,
4625 )
4630 )
4626
4631
4627 stdin = proc.stdin
4632 stdin = proc.stdin
4628 stdout = proc.stdout
4633 stdout = proc.stdout
4629 stderr = proc.stderr
4634 stderr = proc.stderr
4630
4635
4631 # We turn the pipes into observers so we can log I/O.
4636 # We turn the pipes into observers so we can log I/O.
4632 if ui.verbose or opts[b'peer'] == b'raw':
4637 if ui.verbose or opts[b'peer'] == b'raw':
4633 stdin = util.makeloggingfileobject(
4638 stdin = util.makeloggingfileobject(
4634 ui, proc.stdin, b'i', logdata=True
4639 ui, proc.stdin, b'i', logdata=True
4635 )
4640 )
4636 stdout = util.makeloggingfileobject(
4641 stdout = util.makeloggingfileobject(
4637 ui, proc.stdout, b'o', logdata=True
4642 ui, proc.stdout, b'o', logdata=True
4638 )
4643 )
4639 stderr = util.makeloggingfileobject(
4644 stderr = util.makeloggingfileobject(
4640 ui, proc.stderr, b'e', logdata=True
4645 ui, proc.stderr, b'e', logdata=True
4641 )
4646 )
4642
4647
4643 # --localssh also implies the peer connection settings.
4648 # --localssh also implies the peer connection settings.
4644
4649
4645 url = b'ssh://localserver'
4650 url = b'ssh://localserver'
4646 autoreadstderr = not opts[b'noreadstderr']
4651 autoreadstderr = not opts[b'noreadstderr']
4647
4652
4648 if opts[b'peer'] == b'ssh1':
4653 if opts[b'peer'] == b'ssh1':
4649 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4654 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4650 peer = sshpeer.sshv1peer(
4655 peer = sshpeer.sshv1peer(
4651 ui,
4656 ui,
4652 url,
4657 url,
4653 proc,
4658 proc,
4654 stdin,
4659 stdin,
4655 stdout,
4660 stdout,
4656 stderr,
4661 stderr,
4657 None,
4662 None,
4658 autoreadstderr=autoreadstderr,
4663 autoreadstderr=autoreadstderr,
4659 )
4664 )
4660 elif opts[b'peer'] == b'raw':
4665 elif opts[b'peer'] == b'raw':
4661 ui.write(_(b'using raw connection to peer\n'))
4666 ui.write(_(b'using raw connection to peer\n'))
4662 peer = None
4667 peer = None
4663 else:
4668 else:
4664 ui.write(_(b'creating ssh peer from handshake results\n'))
4669 ui.write(_(b'creating ssh peer from handshake results\n'))
4665 peer = sshpeer.makepeer(
4670 peer = sshpeer.makepeer(
4666 ui,
4671 ui,
4667 url,
4672 url,
4668 proc,
4673 proc,
4669 stdin,
4674 stdin,
4670 stdout,
4675 stdout,
4671 stderr,
4676 stderr,
4672 autoreadstderr=autoreadstderr,
4677 autoreadstderr=autoreadstderr,
4673 )
4678 )
4674
4679
4675 elif path:
4680 elif path:
4676 # We bypass hg.peer() so we can proxy the sockets.
4681 # We bypass hg.peer() so we can proxy the sockets.
4677 # TODO consider not doing this because we skip
4682 # TODO consider not doing this because we skip
4678 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4683 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4679 u = urlutil.url(path)
4684 u = urlutil.url(path)
4680 if u.scheme != b'http':
4685 if u.scheme != b'http':
4681 raise error.Abort(_(b'only http:// paths are currently supported'))
4686 raise error.Abort(_(b'only http:// paths are currently supported'))
4682
4687
4683 url, authinfo = u.authinfo()
4688 url, authinfo = u.authinfo()
4684 openerargs = {
4689 openerargs = {
4685 'useragent': b'Mercurial debugwireproto',
4690 'useragent': b'Mercurial debugwireproto',
4686 }
4691 }
4687
4692
4688 # Turn pipes/sockets into observers so we can log I/O.
4693 # Turn pipes/sockets into observers so we can log I/O.
4689 if ui.verbose:
4694 if ui.verbose:
4690 openerargs.update(
4695 openerargs.update(
4691 {
4696 {
4692 'loggingfh': ui,
4697 'loggingfh': ui,
4693 'loggingname': b's',
4698 'loggingname': b's',
4694 'loggingopts': {
4699 'loggingopts': {
4695 'logdata': True,
4700 'logdata': True,
4696 'logdataapis': False,
4701 'logdataapis': False,
4697 },
4702 },
4698 }
4703 }
4699 )
4704 )
4700
4705
4701 if ui.debugflag:
4706 if ui.debugflag:
4702 openerargs['loggingopts']['logdataapis'] = True
4707 openerargs['loggingopts']['logdataapis'] = True
4703
4708
4704 # Don't send default headers when in raw mode. This allows us to
4709 # Don't send default headers when in raw mode. This allows us to
4705 # bypass most of the behavior of our URL handling code so we can
4710 # bypass most of the behavior of our URL handling code so we can
4706 # have near complete control over what's sent on the wire.
4711 # have near complete control over what's sent on the wire.
4707 if opts[b'peer'] == b'raw':
4712 if opts[b'peer'] == b'raw':
4708 openerargs['sendaccept'] = False
4713 openerargs['sendaccept'] = False
4709
4714
4710 opener = urlmod.opener(ui, authinfo, **openerargs)
4715 opener = urlmod.opener(ui, authinfo, **openerargs)
4711
4716
4712 if opts[b'peer'] == b'raw':
4717 if opts[b'peer'] == b'raw':
4713 ui.write(_(b'using raw connection to peer\n'))
4718 ui.write(_(b'using raw connection to peer\n'))
4714 peer = None
4719 peer = None
4715 elif opts[b'peer']:
4720 elif opts[b'peer']:
4716 raise error.Abort(
4721 raise error.Abort(
4717 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4722 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4718 )
4723 )
4719 else:
4724 else:
4720 peer = httppeer.makepeer(ui, path, opener=opener)
4725 peer = httppeer.makepeer(ui, path, opener=opener)
4721
4726
4722 # We /could/ populate stdin/stdout with sock.makefile()...
4727 # We /could/ populate stdin/stdout with sock.makefile()...
4723 else:
4728 else:
4724 raise error.Abort(_(b'unsupported connection configuration'))
4729 raise error.Abort(_(b'unsupported connection configuration'))
4725
4730
4726 batchedcommands = None
4731 batchedcommands = None
4727
4732
4728 # Now perform actions based on the parsed wire language instructions.
4733 # Now perform actions based on the parsed wire language instructions.
4729 for action, lines in blocks:
4734 for action, lines in blocks:
4730 if action in (b'raw', b'raw+'):
4735 if action in (b'raw', b'raw+'):
4731 if not stdin:
4736 if not stdin:
4732 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4737 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4733
4738
4734 # Concatenate the data together.
4739 # Concatenate the data together.
4735 data = b''.join(l.lstrip() for l in lines)
4740 data = b''.join(l.lstrip() for l in lines)
4736 data = stringutil.unescapestr(data)
4741 data = stringutil.unescapestr(data)
4737 stdin.write(data)
4742 stdin.write(data)
4738
4743
4739 if action == b'raw+':
4744 if action == b'raw+':
4740 stdin.flush()
4745 stdin.flush()
4741 elif action == b'flush':
4746 elif action == b'flush':
4742 if not stdin:
4747 if not stdin:
4743 raise error.Abort(_(b'cannot call flush on this peer'))
4748 raise error.Abort(_(b'cannot call flush on this peer'))
4744 stdin.flush()
4749 stdin.flush()
4745 elif action.startswith(b'command'):
4750 elif action.startswith(b'command'):
4746 if not peer:
4751 if not peer:
4747 raise error.Abort(
4752 raise error.Abort(
4748 _(
4753 _(
4749 b'cannot send commands unless peer instance '
4754 b'cannot send commands unless peer instance '
4750 b'is available'
4755 b'is available'
4751 )
4756 )
4752 )
4757 )
4753
4758
4754 command = action.split(b' ', 1)[1]
4759 command = action.split(b' ', 1)[1]
4755
4760
4756 args = {}
4761 args = {}
4757 for line in lines:
4762 for line in lines:
4758 # We need to allow empty values.
4763 # We need to allow empty values.
4759 fields = line.lstrip().split(b' ', 1)
4764 fields = line.lstrip().split(b' ', 1)
4760 if len(fields) == 1:
4765 if len(fields) == 1:
4761 key = fields[0]
4766 key = fields[0]
4762 value = b''
4767 value = b''
4763 else:
4768 else:
4764 key, value = fields
4769 key, value = fields
4765
4770
4766 if value.startswith(b'eval:'):
4771 if value.startswith(b'eval:'):
4767 value = stringutil.evalpythonliteral(value[5:])
4772 value = stringutil.evalpythonliteral(value[5:])
4768 else:
4773 else:
4769 value = stringutil.unescapestr(value)
4774 value = stringutil.unescapestr(value)
4770
4775
4771 args[key] = value
4776 args[key] = value
4772
4777
4773 if batchedcommands is not None:
4778 if batchedcommands is not None:
4774 batchedcommands.append((command, args))
4779 batchedcommands.append((command, args))
4775 continue
4780 continue
4776
4781
4777 ui.status(_(b'sending %s command\n') % command)
4782 ui.status(_(b'sending %s command\n') % command)
4778
4783
4779 if b'PUSHFILE' in args:
4784 if b'PUSHFILE' in args:
4780 with open(args[b'PUSHFILE'], 'rb') as fh:
4785 with open(args[b'PUSHFILE'], 'rb') as fh:
4781 del args[b'PUSHFILE']
4786 del args[b'PUSHFILE']
4782 res, output = peer._callpush(
4787 res, output = peer._callpush(
4783 command, fh, **pycompat.strkwargs(args)
4788 command, fh, **pycompat.strkwargs(args)
4784 )
4789 )
4785 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4790 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4786 ui.status(
4791 ui.status(
4787 _(b'remote output: %s\n') % stringutil.escapestr(output)
4792 _(b'remote output: %s\n') % stringutil.escapestr(output)
4788 )
4793 )
4789 else:
4794 else:
4790 with peer.commandexecutor() as e:
4795 with peer.commandexecutor() as e:
4791 res = e.callcommand(command, args).result()
4796 res = e.callcommand(command, args).result()
4792
4797
4793 ui.status(
4798 ui.status(
4794 _(b'response: %s\n')
4799 _(b'response: %s\n')
4795 % stringutil.pprint(res, bprefix=True, indent=2)
4800 % stringutil.pprint(res, bprefix=True, indent=2)
4796 )
4801 )
4797
4802
4798 elif action == b'batchbegin':
4803 elif action == b'batchbegin':
4799 if batchedcommands is not None:
4804 if batchedcommands is not None:
4800 raise error.Abort(_(b'nested batchbegin not allowed'))
4805 raise error.Abort(_(b'nested batchbegin not allowed'))
4801
4806
4802 batchedcommands = []
4807 batchedcommands = []
4803 elif action == b'batchsubmit':
4808 elif action == b'batchsubmit':
4804 # There is a batching API we could go through. But it would be
4809 # There is a batching API we could go through. But it would be
4805 # difficult to normalize requests into function calls. It is easier
4810 # difficult to normalize requests into function calls. It is easier
4806 # to bypass this layer and normalize to commands + args.
4811 # to bypass this layer and normalize to commands + args.
4807 ui.status(
4812 ui.status(
4808 _(b'sending batch with %d sub-commands\n')
4813 _(b'sending batch with %d sub-commands\n')
4809 % len(batchedcommands)
4814 % len(batchedcommands)
4810 )
4815 )
4811 assert peer is not None
4816 assert peer is not None
4812 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4817 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4813 ui.status(
4818 ui.status(
4814 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4819 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4815 )
4820 )
4816
4821
4817 batchedcommands = None
4822 batchedcommands = None
4818
4823
4819 elif action.startswith(b'httprequest '):
4824 elif action.startswith(b'httprequest '):
4820 if not opener:
4825 if not opener:
4821 raise error.Abort(
4826 raise error.Abort(
4822 _(b'cannot use httprequest without an HTTP peer')
4827 _(b'cannot use httprequest without an HTTP peer')
4823 )
4828 )
4824
4829
4825 request = action.split(b' ', 2)
4830 request = action.split(b' ', 2)
4826 if len(request) != 3:
4831 if len(request) != 3:
4827 raise error.Abort(
4832 raise error.Abort(
4828 _(
4833 _(
4829 b'invalid httprequest: expected format is '
4834 b'invalid httprequest: expected format is '
4830 b'"httprequest <method> <path>'
4835 b'"httprequest <method> <path>'
4831 )
4836 )
4832 )
4837 )
4833
4838
4834 method, httppath = request[1:]
4839 method, httppath = request[1:]
4835 headers = {}
4840 headers = {}
4836 body = None
4841 body = None
4837 frames = []
4842 frames = []
4838 for line in lines:
4843 for line in lines:
4839 line = line.lstrip()
4844 line = line.lstrip()
4840 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4845 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4841 if m:
4846 if m:
4842 # Headers need to use native strings.
4847 # Headers need to use native strings.
4843 key = pycompat.strurl(m.group(1))
4848 key = pycompat.strurl(m.group(1))
4844 value = pycompat.strurl(m.group(2))
4849 value = pycompat.strurl(m.group(2))
4845 headers[key] = value
4850 headers[key] = value
4846 continue
4851 continue
4847
4852
4848 if line.startswith(b'BODYFILE '):
4853 if line.startswith(b'BODYFILE '):
4849 with open(line.split(b' ', 1), b'rb') as fh:
4854 with open(line.split(b' ', 1), b'rb') as fh:
4850 body = fh.read()
4855 body = fh.read()
4851 elif line.startswith(b'frame '):
4856 elif line.startswith(b'frame '):
4852 frame = wireprotoframing.makeframefromhumanstring(
4857 frame = wireprotoframing.makeframefromhumanstring(
4853 line[len(b'frame ') :]
4858 line[len(b'frame ') :]
4854 )
4859 )
4855
4860
4856 frames.append(frame)
4861 frames.append(frame)
4857 else:
4862 else:
4858 raise error.Abort(
4863 raise error.Abort(
4859 _(b'unknown argument to httprequest: %s') % line
4864 _(b'unknown argument to httprequest: %s') % line
4860 )
4865 )
4861
4866
4862 url = path + httppath
4867 url = path + httppath
4863
4868
4864 if frames:
4869 if frames:
4865 body = b''.join(bytes(f) for f in frames)
4870 body = b''.join(bytes(f) for f in frames)
4866
4871
4867 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4872 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4868
4873
4869 # urllib.Request insists on using has_data() as a proxy for
4874 # urllib.Request insists on using has_data() as a proxy for
4870 # determining the request method. Override that to use our
4875 # determining the request method. Override that to use our
4871 # explicitly requested method.
4876 # explicitly requested method.
4872 req.get_method = lambda: pycompat.sysstr(method)
4877 req.get_method = lambda: pycompat.sysstr(method)
4873
4878
4874 try:
4879 try:
4875 res = opener.open(req)
4880 res = opener.open(req)
4876 body = res.read()
4881 body = res.read()
4877 except util.urlerr.urlerror as e:
4882 except util.urlerr.urlerror as e:
4878 # read() method must be called, but only exists in Python 2
4883 # read() method must be called, but only exists in Python 2
4879 getattr(e, 'read', lambda: None)()
4884 getattr(e, 'read', lambda: None)()
4880 continue
4885 continue
4881
4886
4882 ct = res.headers.get('Content-Type')
4887 ct = res.headers.get('Content-Type')
4883 if ct == 'application/mercurial-cbor':
4888 if ct == 'application/mercurial-cbor':
4884 ui.write(
4889 ui.write(
4885 _(b'cbor> %s\n')
4890 _(b'cbor> %s\n')
4886 % stringutil.pprint(
4891 % stringutil.pprint(
4887 cborutil.decodeall(body), bprefix=True, indent=2
4892 cborutil.decodeall(body), bprefix=True, indent=2
4888 )
4893 )
4889 )
4894 )
4890
4895
4891 elif action == b'close':
4896 elif action == b'close':
4892 assert peer is not None
4897 assert peer is not None
4893 peer.close()
4898 peer.close()
4894 elif action == b'readavailable':
4899 elif action == b'readavailable':
4895 if not stdout or not stderr:
4900 if not stdout or not stderr:
4896 raise error.Abort(
4901 raise error.Abort(
4897 _(b'readavailable not available on this peer')
4902 _(b'readavailable not available on this peer')
4898 )
4903 )
4899
4904
4900 stdin.close()
4905 stdin.close()
4901 stdout.read()
4906 stdout.read()
4902 stderr.read()
4907 stderr.read()
4903
4908
4904 elif action == b'readline':
4909 elif action == b'readline':
4905 if not stdout:
4910 if not stdout:
4906 raise error.Abort(_(b'readline not available on this peer'))
4911 raise error.Abort(_(b'readline not available on this peer'))
4907 stdout.readline()
4912 stdout.readline()
4908 elif action == b'ereadline':
4913 elif action == b'ereadline':
4909 if not stderr:
4914 if not stderr:
4910 raise error.Abort(_(b'ereadline not available on this peer'))
4915 raise error.Abort(_(b'ereadline not available on this peer'))
4911 stderr.readline()
4916 stderr.readline()
4912 elif action.startswith(b'read '):
4917 elif action.startswith(b'read '):
4913 count = int(action.split(b' ', 1)[1])
4918 count = int(action.split(b' ', 1)[1])
4914 if not stdout:
4919 if not stdout:
4915 raise error.Abort(_(b'read not available on this peer'))
4920 raise error.Abort(_(b'read not available on this peer'))
4916 stdout.read(count)
4921 stdout.read(count)
4917 elif action.startswith(b'eread '):
4922 elif action.startswith(b'eread '):
4918 count = int(action.split(b' ', 1)[1])
4923 count = int(action.split(b' ', 1)[1])
4919 if not stderr:
4924 if not stderr:
4920 raise error.Abort(_(b'eread not available on this peer'))
4925 raise error.Abort(_(b'eread not available on this peer'))
4921 stderr.read(count)
4926 stderr.read(count)
4922 else:
4927 else:
4923 raise error.Abort(_(b'unknown action: %s') % action)
4928 raise error.Abort(_(b'unknown action: %s') % action)
4924
4929
4925 if batchedcommands is not None:
4930 if batchedcommands is not None:
4926 raise error.Abort(_(b'unclosed "batchbegin" request'))
4931 raise error.Abort(_(b'unclosed "batchbegin" request'))
4927
4932
4928 if peer:
4933 if peer:
4929 peer.close()
4934 peer.close()
4930
4935
4931 if proc:
4936 if proc:
4932 proc.kill()
4937 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now