##// END OF EJS Templates
find-delta: move most of the debug-find-delta code in the debug module...
marmoute -
r50571:4302db0f default
parent child Browse files
Show More
@@ -1,4752 +1,4718
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 getattr,
36 getattr,
37 open,
37 open,
38 )
38 )
39 from . import (
39 from . import (
40 bundle2,
40 bundle2,
41 bundlerepo,
41 bundlerepo,
42 changegroup,
42 changegroup,
43 cmdutil,
43 cmdutil,
44 color,
44 color,
45 context,
45 context,
46 copies,
46 copies,
47 dagparser,
47 dagparser,
48 dirstateutils,
48 dirstateutils,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mdiff,
62 mergestate as mergestatemod,
61 mergestate as mergestatemod,
63 metadata,
62 metadata,
64 obsolete,
63 obsolete,
65 obsutil,
64 obsutil,
66 pathutil,
65 pathutil,
67 phases,
66 phases,
68 policy,
67 policy,
69 pvec,
68 pvec,
70 pycompat,
69 pycompat,
71 registrar,
70 registrar,
72 repair,
71 repair,
73 repoview,
72 repoview,
74 requirements,
73 requirements,
75 revlog,
74 revlog,
76 revlogutils,
77 revset,
75 revset,
78 revsetlang,
76 revsetlang,
79 scmutil,
77 scmutil,
80 setdiscovery,
78 setdiscovery,
81 simplemerge,
79 simplemerge,
82 sshpeer,
80 sshpeer,
83 sslutil,
81 sslutil,
84 streamclone,
82 streamclone,
85 strip,
83 strip,
86 tags as tagsmod,
84 tags as tagsmod,
87 templater,
85 templater,
88 treediscovery,
86 treediscovery,
89 upgrade,
87 upgrade,
90 url as urlmod,
88 url as urlmod,
91 util,
89 util,
92 vfs as vfsmod,
90 vfs as vfsmod,
93 wireprotoframing,
91 wireprotoframing,
94 wireprotoserver,
92 wireprotoserver,
95 )
93 )
96 from .interfaces import repository
94 from .interfaces import repository
97 from .utils import (
95 from .utils import (
98 cborutil,
96 cborutil,
99 compression,
97 compression,
100 dateutil,
98 dateutil,
101 procutil,
99 procutil,
102 stringutil,
100 stringutil,
103 urlutil,
101 urlutil,
104 )
102 )
105
103
106 from .revlogutils import (
104 from .revlogutils import (
107 constants as revlog_constants,
105 constants as revlog_constants,
108 debug as revlog_debug,
106 debug as revlog_debug,
109 deltas as deltautil,
107 deltas as deltautil,
110 nodemap,
108 nodemap,
111 rewrite,
109 rewrite,
112 sidedata,
110 sidedata,
113 )
111 )
114
112
115 release = lockmod.release
113 release = lockmod.release
116
114
117 table = {}
115 table = {}
118 table.update(strip.command._table)
116 table.update(strip.command._table)
119 command = registrar.command(table)
117 command = registrar.command(table)
120
118
121
119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it directly, without
        # requiring (or consulting) a repository.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the current repository's changelog.
        if not repo:
            msg = _(b'there is no Mercurial repository here (.hg not found)')
            raise error.Abort(msg)
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
141
139
142
140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active

    Writes the (harmless) EICAR test file into the repository cache, waits
    briefly so an on-access scanner can react, then removes it.  If an AV
    engine quarantines or deletes the file, the unlink (or a later cache
    access) will fail noisily, revealing the scanner's presence.
    """
    # Mercurial vfs paths are bytes throughout; the filename was previously
    # passed as a str, which is inconsistent with the bytes b'wb' mode and
    # with the rest of this file's vfs usage.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
158
156
159
157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path (handles URLs and local files alike), wrap it in
    # the appropriate unbundler, and replay it into the local repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
166
164
167
165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first pass over the DAG text, only to size the progress bar and the
    # initial mergeable-file contents)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Take both locks and one transaction for the whole build so the created
    # history is committed atomically.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the most recently created node (-1 = none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # node hashes indexed by DAG id, for backref resolution
        id = 0
        progress.update(id)
        # Second pass: actually create a commit per 'n' event.
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the file from both
                        # parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # touch one line per rev so every rev changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # same file rewritten wholesale at every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one brand-new file per rev
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry over the second parent's nf* files so the
                        # merge does not drop them
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file data from filecontent
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG parent ids to actual node hashes.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: remember it, written out at the end
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # branch event: affects all subsequently created nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
351
349
352
350
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup unbundler ``gen``

    With ``all`` set, every delta chunk of the changelog, manifest and each
    filelog is listed (id, parents, cset, delta base, delta length);
    otherwise only the changelog node hashes are printed.  ``indent``
    shifts the whole output right, used when nested inside bundle2 part
    output.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one section header, then one line per delta in the
            # current chunk stream (closes over gen's iteration state)
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # changegroup streams are ordered: changelog, manifest, filelogs
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392
390
393
391
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown on-disk format: report it instead of crashing.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= prefix, exc.version, len(data)
        ui.write(msg)
        return
    msg = b"%sversion: %d (%d bytes)\n"
    msg %= prefix, version, len(data)
    ui.write(msg)
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        marker = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(prefix)
        cmdutil.showmarker(fm, marker)
    fm.end()
416
414
417
415
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
426
424
427
425
def _quasirepr(thing):
    # Deterministic repr for debug output: mappings are rendered with
    # sorted keys so the result does not depend on insertion order.
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(pairs)
434
432
435
433
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2

    Prints the stream parameters, then one line per part (type, params,
    mandatory flag).  Known part payloads (changegroup, obsmarkers,
    phase-heads) are additionally expanded, indented, unless --quiet.
    """
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional --part-type filter: show only the named part types
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # default to cg version '01' when the part does not say
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458
456
459
457
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: just print the bundlespec and stop
            bundlespec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % bundlespec)
            return

        unbundler = exchange.readbundle(ui, f, bundlepath)
        # bundle2 files get their own, richer dump; plain changegroups
        # fall through to the basic one
        if isinstance(unbundler, bundle2.unbundle20):
            return _debugbundle2(ui, unbundler, all=all, **opts)
        _debugchangegroup(ui, unbundler, all=all, **opts)
482
480
483
481
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # plain wire-protocol capabilities first
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b'  %s\n' % cap)
        # then the decoded bundle2 capability blob, if any
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
503
501
504
502
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision

    For each touched file, prints its action (added/removed/merged/
    salvaged/touched) and, when the file was copied, which parent it was
    copied from and the source path.  By default the data is read from
    the changelog sidedata; --compute derives it from the revision
    contents instead.
    """
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        # files stays None (and nothing is printed) when the revision has
        # no files sidedata stored
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # classify the change; the categories are mutually exclusive,
            # with b"touched" as the catch-all
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
554
552
555
553
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    manifest1 = repo[parent1].manifest()
    manifest2 = repo[parent2].manifest()
    errorcount = 0
    # report every inconsistency found, then abort once at the end
    for err in repo.dirstate.verify(manifest1, manifest2):
        ui.warn(err[0] % err[1:])
        errorcount += 1
    if errorcount:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
569
567
570
568
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583
581
584
582
def _debugdisplaycolor(ui):
    """print every known color/effect, each labeled with itself"""
    # work on a copy so the caller's ui keeps its real styles
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: user-defined color.* / terminfo.* entries too
        for key, _value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601
599
602
600
def _debugdisplaystyle(ui):
    """print each configured style label and its effects, aligned"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every label to the width of the longest one
    width = max(len(label) for label in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            padding = max(0, width - len(label))
            ui.write(b' ' * padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616
614
617
615
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles archive raw revlog data, so phase boundaries are
        # not honored: warn instead of silently including secret changesets.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # Generate the v1 stream-clone bundle and stream its chunks to `fname`.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
639
637
640
638
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Read the DAG straight from the given revlog index file.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        # Event stream consumed by dagparser.dagtextlines():
        #   b'n': a node, as (rev, [parent revs]) with null parents dropped
        #   b'l': a label attached to a revision
        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    # Explicitly requested revisions get an "rN" label.
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each revision to the tag names pointing at it.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # b'a' events annotate the output whenever the branch name
            # changes between consecutive revisions.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
710
708
711
709
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the single positional
    # argument is the revision, not a file.
    implied = any(opts.get(k) for k in (b'changelog', b'manifest', b'dir'))
    if implied:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
725
728
726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the larger set of accepted date formats.
    parsed = (
        dateutil.parsedate(date, dateutil.extendeddateformats)
        if opts["extended"]
        else dateutil.parsedate(date)
    )
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # The optional second argument is a date range to match against.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747
745
748
746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    # not every storage object supports sparse read, hence the getattr
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # return (p1, p2, compsize, uncompsize, deltatype, chain, chainsize)
        # for ``rev``
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            # walk down through empty deltas; the bound checks against
            # p1_base/nullrev/total_revs guard against corrupt cycles
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without general delta the base can only be the revision
            # itself or the previous one
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # column headers, aligned with the %-format widths used in fm.write below
    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'  readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains in order of first appearance of their base
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length one: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: how many hunks, how much data
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989
987
990
988
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with the full-text for the revision. This
    can be controlled with the --source flag.

    REV uses the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # One positional argument means "REV" (storage picked via -c/-m),
    # two mean "FILE REV".
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # use this command's own name (it was b'debugdeltachain', a copy-paste
    # leftover) so errors raised by openrevlog mention the right command;
    # also avoid shadowing the module-level `revlog` import
    rlog = cmdutil.openrevlog(repo, b'debug-delta-find', file_, opts)

    p1r, p2r = rlog.parentrevs(rev)

    # translate --source into the revision the incoming delta would be
    # based on; nullrev means "feed the full text"
    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = rlog.deltaparent(rev)
    elif source == b'p1':
        base_rev = p1r
    elif source == b'p2':
        base_rev = p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, rlog, rev, base_rev=base_rev)
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket dumps the dirstate-v2 metadata file instead of entries
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is deprecated but still wins over --dates when given
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename); filename breaks mtime ties
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # pad to the width of the strftime output below
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink: no meaningful permission bits
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1169
1135
1170
1136
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 dockets carry the ignore-pattern hash; v1 prints
    # nothing (the command simply produces no output in that case).
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is stored as the trailing bytes of the tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1185
1151
1186
1152
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual (or default) remote peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # use the local repository itself as the "remote", restricted to the
        # requested revisions through a dedicated repoview filter
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # similarly restrict what the local side is allowed to see
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy (tree-based) discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # `_any` renamed from `any` to avoid shadowing the builtin
            common, _any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # strict formats (e.g. json) must not be polluted by random
        # command output, so capture it into the data dict instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    # `all_revs` renamed from `all` to avoid shadowing the builtin
    all_revs = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all_revs)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all_revs)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    # NOTE(review): the label column alignment inside these byte strings may
    # have been lost in transit — confirm against test-setdiscovery.t output
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1468
1434
1469
1435
1470 _chunksize = 4 << 10
1436 _chunksize = 4 << 10
1471
1437
1472
1438
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is written to stdout, or to the file named by --output.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # copy in fixed-size chunks so large downloads stay bounded in memory
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the source handle too (previously leaked); the nested
        # try/finally guarantees the output file is closed even if
        # fh.close() raises
        try:
            fh.close()
        finally:
            if output:
                dest.close()
1495
1461
1496
1462
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) builds have no __file__; point at the binary
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with compatibility information against the
            # running Mercurial version
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1558
1524
1559
1525
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # successive transformation stages of the parsed fileset expression
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1655
1621
1656
1622
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # producing a report and consuming one (or dry-running) are exclusive
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only affects revlog v1 repositories
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # all the real work lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the longest variant name, but never narrower than the
    # b'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # left-aligned "name:" padded to the common column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes values (duck-typed via startswith) pass through;
            # everything else is rendered as yes/no for plain output
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json, template, ...) keep raw values
        formatvalue = pycompat.identity

    # header row; config/default columns only appear with --verbose
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so color/templating can highlight mismatches between
        # the repo's actual format, the config, and Mercurial's default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns are verbose-only
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result the way this command always has
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # the case-sensitivity probe needs to create a temporary file; if that
    # fails (e.g. read-only path) we report '(unknown)' rather than abort
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # build the wire-protocol arguments; node ids arrive as full hex strings
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **kwargs)

    # translate the user-facing compression name into an on-disk bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: dump the combined matcher and stop
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                # the file itself matches an ignore rule
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # otherwise it may be ignored via one of its parent dirs;
                # report the first (outermost-to-innermost) one that matches
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        # point at the exact rule (file, line number, pattern) that matched
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    # resolve -c / -m / FILE to the backing storage object
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    fm = ui.formatter(b'debugindex', opts)
    # some storage objects wrap the actual revlog in a `_revlog` attribute;
    # fall back to the object itself otherwise
    target = getattr(store, b'_revlog', store)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=target,
        full_node=ui.debugflag,
    )
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    # emit one "parent-rev -> rev" edge per parent, skipping null parents
    for rev in r:
        p1, p2 = r.parents(r.node(rev))
        ui.write(b"\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write(b"}\n")
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index (node lookup) before sampling its statistics
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1979 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1945 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1980 def debuginstall(ui, **opts):
1946 def debuginstall(ui, **opts):
1981 """test Mercurial installation
1947 """test Mercurial installation
1982
1948
1983 Returns 0 on success.
1949 Returns 0 on success.
1984 """
1950 """
1985 opts = pycompat.byteskwargs(opts)
1951 opts = pycompat.byteskwargs(opts)
1986
1952
1987 problems = 0
1953 problems = 0
1988
1954
1989 fm = ui.formatter(b'debuginstall', opts)
1955 fm = ui.formatter(b'debuginstall', opts)
1990 fm.startitem()
1956 fm.startitem()
1991
1957
1992 # encoding might be unknown or wrong. don't translate these messages.
1958 # encoding might be unknown or wrong. don't translate these messages.
1993 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1959 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1994 err = None
1960 err = None
1995 try:
1961 try:
1996 codecs.lookup(pycompat.sysstr(encoding.encoding))
1962 codecs.lookup(pycompat.sysstr(encoding.encoding))
1997 except LookupError as inst:
1963 except LookupError as inst:
1998 err = stringutil.forcebytestr(inst)
1964 err = stringutil.forcebytestr(inst)
1999 problems += 1
1965 problems += 1
2000 fm.condwrite(
1966 fm.condwrite(
2001 err,
1967 err,
2002 b'encodingerror',
1968 b'encodingerror',
2003 b" %s\n (check that your locale is properly set)\n",
1969 b" %s\n (check that your locale is properly set)\n",
2004 err,
1970 err,
2005 )
1971 )
2006
1972
2007 # Python
1973 # Python
2008 pythonlib = None
1974 pythonlib = None
2009 if util.safehasattr(os, '__file__'):
1975 if util.safehasattr(os, '__file__'):
2010 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1976 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
2011 elif getattr(sys, 'oxidized', False):
1977 elif getattr(sys, 'oxidized', False):
2012 pythonlib = pycompat.sysexecutable
1978 pythonlib = pycompat.sysexecutable
2013
1979
2014 fm.write(
1980 fm.write(
2015 b'pythonexe',
1981 b'pythonexe',
2016 _(b"checking Python executable (%s)\n"),
1982 _(b"checking Python executable (%s)\n"),
2017 pycompat.sysexecutable or _(b"unknown"),
1983 pycompat.sysexecutable or _(b"unknown"),
2018 )
1984 )
2019 fm.write(
1985 fm.write(
2020 b'pythonimplementation',
1986 b'pythonimplementation',
2021 _(b"checking Python implementation (%s)\n"),
1987 _(b"checking Python implementation (%s)\n"),
2022 pycompat.sysbytes(platform.python_implementation()),
1988 pycompat.sysbytes(platform.python_implementation()),
2023 )
1989 )
2024 fm.write(
1990 fm.write(
2025 b'pythonver',
1991 b'pythonver',
2026 _(b"checking Python version (%s)\n"),
1992 _(b"checking Python version (%s)\n"),
2027 (b"%d.%d.%d" % sys.version_info[:3]),
1993 (b"%d.%d.%d" % sys.version_info[:3]),
2028 )
1994 )
2029 fm.write(
1995 fm.write(
2030 b'pythonlib',
1996 b'pythonlib',
2031 _(b"checking Python lib (%s)...\n"),
1997 _(b"checking Python lib (%s)...\n"),
2032 pythonlib or _(b"unknown"),
1998 pythonlib or _(b"unknown"),
2033 )
1999 )
2034
2000
2035 try:
2001 try:
2036 from . import rustext # pytype: disable=import-error
2002 from . import rustext # pytype: disable=import-error
2037
2003
2038 rustext.__doc__ # trigger lazy import
2004 rustext.__doc__ # trigger lazy import
2039 except ImportError:
2005 except ImportError:
2040 rustext = None
2006 rustext = None
2041
2007
2042 security = set(sslutil.supportedprotocols)
2008 security = set(sslutil.supportedprotocols)
2043 if sslutil.hassni:
2009 if sslutil.hassni:
2044 security.add(b'sni')
2010 security.add(b'sni')
2045
2011
2046 fm.write(
2012 fm.write(
2047 b'pythonsecurity',
2013 b'pythonsecurity',
2048 _(b"checking Python security support (%s)\n"),
2014 _(b"checking Python security support (%s)\n"),
2049 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2015 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2050 )
2016 )
2051
2017
2052 # These are warnings, not errors. So don't increment problem count. This
2018 # These are warnings, not errors. So don't increment problem count. This
2053 # may change in the future.
2019 # may change in the future.
2054 if b'tls1.2' not in security:
2020 if b'tls1.2' not in security:
2055 fm.plain(
2021 fm.plain(
2056 _(
2022 _(
2057 b' TLS 1.2 not supported by Python install; '
2023 b' TLS 1.2 not supported by Python install; '
2058 b'network connections lack modern security\n'
2024 b'network connections lack modern security\n'
2059 )
2025 )
2060 )
2026 )
2061 if b'sni' not in security:
2027 if b'sni' not in security:
2062 fm.plain(
2028 fm.plain(
2063 _(
2029 _(
2064 b' SNI not supported by Python install; may have '
2030 b' SNI not supported by Python install; may have '
2065 b'connectivity issues with some servers\n'
2031 b'connectivity issues with some servers\n'
2066 )
2032 )
2067 )
2033 )
2068
2034
2069 fm.plain(
2035 fm.plain(
2070 _(
2036 _(
2071 b"checking Rust extensions (%s)\n"
2037 b"checking Rust extensions (%s)\n"
2072 % (b'missing' if rustext is None else b'installed')
2038 % (b'missing' if rustext is None else b'installed')
2073 ),
2039 ),
2074 )
2040 )
2075
2041
2076 # TODO print CA cert info
2042 # TODO print CA cert info
2077
2043
2078 # hg version
2044 # hg version
2079 hgver = util.version()
2045 hgver = util.version()
2080 fm.write(
2046 fm.write(
2081 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2047 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2082 )
2048 )
2083 fm.write(
2049 fm.write(
2084 b'hgverextra',
2050 b'hgverextra',
2085 _(b"checking Mercurial custom build (%s)\n"),
2051 _(b"checking Mercurial custom build (%s)\n"),
2086 b'+'.join(hgver.split(b'+')[1:]),
2052 b'+'.join(hgver.split(b'+')[1:]),
2087 )
2053 )
2088
2054
2089 # compiled modules
2055 # compiled modules
2090 hgmodules = None
2056 hgmodules = None
2091 if util.safehasattr(sys.modules[__name__], '__file__'):
2057 if util.safehasattr(sys.modules[__name__], '__file__'):
2092 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2058 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2093 elif getattr(sys, 'oxidized', False):
2059 elif getattr(sys, 'oxidized', False):
2094 hgmodules = pycompat.sysexecutable
2060 hgmodules = pycompat.sysexecutable
2095
2061
2096 fm.write(
2062 fm.write(
2097 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2063 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2098 )
2064 )
2099 fm.write(
2065 fm.write(
2100 b'hgmodules',
2066 b'hgmodules',
2101 _(b"checking installed modules (%s)...\n"),
2067 _(b"checking installed modules (%s)...\n"),
2102 hgmodules or _(b"unknown"),
2068 hgmodules or _(b"unknown"),
2103 )
2069 )
2104
2070
2105 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2071 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2106 rustext = rustandc # for now, that's the only case
2072 rustext = rustandc # for now, that's the only case
2107 cext = policy.policy in (b'c', b'allow') or rustandc
2073 cext = policy.policy in (b'c', b'allow') or rustandc
2108 nopure = cext or rustext
2074 nopure = cext or rustext
2109 if nopure:
2075 if nopure:
2110 err = None
2076 err = None
2111 try:
2077 try:
2112 if cext:
2078 if cext:
2113 from .cext import ( # pytype: disable=import-error
2079 from .cext import ( # pytype: disable=import-error
2114 base85,
2080 base85,
2115 bdiff,
2081 bdiff,
2116 mpatch,
2082 mpatch,
2117 osutil,
2083 osutil,
2118 )
2084 )
2119
2085
2120 # quiet pyflakes
2086 # quiet pyflakes
2121 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2087 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2122 if rustext:
2088 if rustext:
2123 from .rustext import ( # pytype: disable=import-error
2089 from .rustext import ( # pytype: disable=import-error
2124 ancestor,
2090 ancestor,
2125 dirstate,
2091 dirstate,
2126 )
2092 )
2127
2093
2128 dir(ancestor), dir(dirstate) # quiet pyflakes
2094 dir(ancestor), dir(dirstate) # quiet pyflakes
2129 except Exception as inst:
2095 except Exception as inst:
2130 err = stringutil.forcebytestr(inst)
2096 err = stringutil.forcebytestr(inst)
2131 problems += 1
2097 problems += 1
2132 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2098 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2133
2099
2134 compengines = util.compengines._engines.values()
2100 compengines = util.compengines._engines.values()
2135 fm.write(
2101 fm.write(
2136 b'compengines',
2102 b'compengines',
2137 _(b'checking registered compression engines (%s)\n'),
2103 _(b'checking registered compression engines (%s)\n'),
2138 fm.formatlist(
2104 fm.formatlist(
2139 sorted(e.name() for e in compengines),
2105 sorted(e.name() for e in compengines),
2140 name=b'compengine',
2106 name=b'compengine',
2141 fmt=b'%s',
2107 fmt=b'%s',
2142 sep=b', ',
2108 sep=b', ',
2143 ),
2109 ),
2144 )
2110 )
2145 fm.write(
2111 fm.write(
2146 b'compenginesavail',
2112 b'compenginesavail',
2147 _(b'checking available compression engines (%s)\n'),
2113 _(b'checking available compression engines (%s)\n'),
2148 fm.formatlist(
2114 fm.formatlist(
2149 sorted(e.name() for e in compengines if e.available()),
2115 sorted(e.name() for e in compengines if e.available()),
2150 name=b'compengine',
2116 name=b'compengine',
2151 fmt=b'%s',
2117 fmt=b'%s',
2152 sep=b', ',
2118 sep=b', ',
2153 ),
2119 ),
2154 )
2120 )
2155 wirecompengines = compression.compengines.supportedwireengines(
2121 wirecompengines = compression.compengines.supportedwireengines(
2156 compression.SERVERROLE
2122 compression.SERVERROLE
2157 )
2123 )
2158 fm.write(
2124 fm.write(
2159 b'compenginesserver',
2125 b'compenginesserver',
2160 _(
2126 _(
2161 b'checking available compression engines '
2127 b'checking available compression engines '
2162 b'for wire protocol (%s)\n'
2128 b'for wire protocol (%s)\n'
2163 ),
2129 ),
2164 fm.formatlist(
2130 fm.formatlist(
2165 [e.name() for e in wirecompengines if e.wireprotosupport()],
2131 [e.name() for e in wirecompengines if e.wireprotosupport()],
2166 name=b'compengine',
2132 name=b'compengine',
2167 fmt=b'%s',
2133 fmt=b'%s',
2168 sep=b', ',
2134 sep=b', ',
2169 ),
2135 ),
2170 )
2136 )
2171 re2 = b'missing'
2137 re2 = b'missing'
2172 if util._re2:
2138 if util._re2:
2173 re2 = b'available'
2139 re2 = b'available'
2174 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2140 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2175 fm.data(re2=bool(util._re2))
2141 fm.data(re2=bool(util._re2))
2176
2142
2177 # templates
2143 # templates
2178 p = templater.templatedir()
2144 p = templater.templatedir()
2179 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2145 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2180 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2146 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2181 if p:
2147 if p:
2182 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2148 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2183 if m:
2149 if m:
2184 # template found, check if it is working
2150 # template found, check if it is working
2185 err = None
2151 err = None
2186 try:
2152 try:
2187 templater.templater.frommapfile(m)
2153 templater.templater.frommapfile(m)
2188 except Exception as inst:
2154 except Exception as inst:
2189 err = stringutil.forcebytestr(inst)
2155 err = stringutil.forcebytestr(inst)
2190 p = None
2156 p = None
2191 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2157 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2192 else:
2158 else:
2193 p = None
2159 p = None
2194 fm.condwrite(
2160 fm.condwrite(
2195 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2161 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2196 )
2162 )
2197 fm.condwrite(
2163 fm.condwrite(
2198 not m,
2164 not m,
2199 b'defaulttemplatenotfound',
2165 b'defaulttemplatenotfound',
2200 _(b" template '%s' not found\n"),
2166 _(b" template '%s' not found\n"),
2201 b"default",
2167 b"default",
2202 )
2168 )
2203 if not p:
2169 if not p:
2204 problems += 1
2170 problems += 1
2205 fm.condwrite(
2171 fm.condwrite(
2206 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2172 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2207 )
2173 )
2208
2174
2209 # editor
2175 # editor
2210 editor = ui.geteditor()
2176 editor = ui.geteditor()
2211 editor = util.expandpath(editor)
2177 editor = util.expandpath(editor)
2212 editorbin = procutil.shellsplit(editor)[0]
2178 editorbin = procutil.shellsplit(editor)[0]
2213 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2179 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2214 cmdpath = procutil.findexe(editorbin)
2180 cmdpath = procutil.findexe(editorbin)
2215 fm.condwrite(
2181 fm.condwrite(
2216 not cmdpath and editor == b'vi',
2182 not cmdpath and editor == b'vi',
2217 b'vinotfound',
2183 b'vinotfound',
2218 _(
2184 _(
2219 b" No commit editor set and can't find %s in PATH\n"
2185 b" No commit editor set and can't find %s in PATH\n"
2220 b" (specify a commit editor in your configuration"
2186 b" (specify a commit editor in your configuration"
2221 b" file)\n"
2187 b" file)\n"
2222 ),
2188 ),
2223 not cmdpath and editor == b'vi' and editorbin,
2189 not cmdpath and editor == b'vi' and editorbin,
2224 )
2190 )
2225 fm.condwrite(
2191 fm.condwrite(
2226 not cmdpath and editor != b'vi',
2192 not cmdpath and editor != b'vi',
2227 b'editornotfound',
2193 b'editornotfound',
2228 _(
2194 _(
2229 b" Can't find editor '%s' in PATH\n"
2195 b" Can't find editor '%s' in PATH\n"
2230 b" (specify a commit editor in your configuration"
2196 b" (specify a commit editor in your configuration"
2231 b" file)\n"
2197 b" file)\n"
2232 ),
2198 ),
2233 not cmdpath and editorbin,
2199 not cmdpath and editorbin,
2234 )
2200 )
2235 if not cmdpath and editor != b'vi':
2201 if not cmdpath and editor != b'vi':
2236 problems += 1
2202 problems += 1
2237
2203
2238 # check username
2204 # check username
2239 username = None
2205 username = None
2240 err = None
2206 err = None
2241 try:
2207 try:
2242 username = ui.username()
2208 username = ui.username()
2243 except error.Abort as e:
2209 except error.Abort as e:
2244 err = e.message
2210 err = e.message
2245 problems += 1
2211 problems += 1
2246
2212
2247 fm.condwrite(
2213 fm.condwrite(
2248 username, b'username', _(b"checking username (%s)\n"), username
2214 username, b'username', _(b"checking username (%s)\n"), username
2249 )
2215 )
2250 fm.condwrite(
2216 fm.condwrite(
2251 err,
2217 err,
2252 b'usernameerror',
2218 b'usernameerror',
2253 _(
2219 _(
2254 b"checking username...\n %s\n"
2220 b"checking username...\n %s\n"
2255 b" (specify a username in your configuration file)\n"
2221 b" (specify a username in your configuration file)\n"
2256 ),
2222 ),
2257 err,
2223 err,
2258 )
2224 )
2259
2225
2260 for name, mod in extensions.extensions():
2226 for name, mod in extensions.extensions():
2261 handler = getattr(mod, 'debuginstall', None)
2227 handler = getattr(mod, 'debuginstall', None)
2262 if handler is not None:
2228 if handler is not None:
2263 problems += handler(ui, fm)
2229 problems += handler(ui, fm)
2264
2230
2265 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2231 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2266 if not problems:
2232 if not problems:
2267 fm.data(problems=problems)
2233 fm.data(problems=problems)
2268 fm.condwrite(
2234 fm.condwrite(
2269 problems,
2235 problems,
2270 b'problems',
2236 b'problems',
2271 _(b"%d problems detected, please check your install!\n"),
2237 _(b"%d problems detected, please check your install!\n"),
2272 problems,
2238 problems,
2273 )
2239 )
2274 fm.end()
2240 fm.end()
2275
2241
2276 return problems
2242 return problems
2277
2243
2278
2244
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Open the target as a peer so this also works over the wire protocol.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    known = peer.known([bin(node_id) for node_id in ids])
    # One character per queried id: b'1' when known, b'0' otherwise.
    ui.write(b"%s\n" % (b"".join([b"1" if f else b"0" for f in known])))
2292
2258
2293
2259
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept only so ancient shell-completion scripts keep working;
    # all real work happens in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2298
2264
2299
2265
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced freeing is a last-resort escape hatch: just remove the lock
    # file(s) without any safety check.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the acquired lock(s) until the user (or a signal) tells
            # us to let go; the finally clause below releases them.
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We managed to take the lock ourselves, so nobody held it.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock contents are "host:pid".  Split only on the first
                    # colon so an unusual hostname containing b':' does not
                    # blow up the unpacking with a ValueError.
                    host, pid = locker.split(b':', 1)
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock disappeared between our probe and the stat: it's free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2422
2388
2423
2389
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache off the manifest storage; not every
        # revlog implementation provides one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # reading stores the revision in the cache
        return

    # No action requested: dump the current cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # peek() avoids refreshing the LRU order while we iterate
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2497
2463
2498
2464
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable rendering of the merge state.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two parents of the merge, with their optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for key, value in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=key)
                fm_extras.data(value=value)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that are no longer part of the merge state
    # (those still in it were emitted above, nested under their file).
    fm_extras = fm.nested(b'extras')
    for f, extras in sorted(ms.allextras().items()):
        if f in ms:
            continue
        for key, value in extras.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=key)
            fm_extras.data(value=value)
    fm_extras.end()

    fm.end()
2606
2572
2607
2573
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # Gather every namespace except branches; branches get special
    # treatment below so that only open ones are offered.
    for name, ns in repo.names.items():
        if name != b'branches':
            names.update(ns.listnames(repo))
    names.update(
        branch
        for (branch, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    # No prefix argument means "complete everything".
    if not args:
        args = [b'']
    completions = set()
    for prefix in args:
        completions.update(n for n in names if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2630
2596
2631
2597
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the changelog index and dump it.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Dump the raw persisted nodemap bytes, if any exist on disk.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the persisted data against the live index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Print the docket (metadata header) of the persisted nodemap.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2693
2659
2694
2660
2695 @command(
2661 @command(
2696 b'debugobsolete',
2662 b'debugobsolete',
2697 [
2663 [
2698 (b'', b'flags', 0, _(b'markers flag')),
2664 (b'', b'flags', 0, _(b'markers flag')),
2699 (
2665 (
2700 b'',
2666 b'',
2701 b'record-parents',
2667 b'record-parents',
2702 False,
2668 False,
2703 _(b'record parent information for the precursor'),
2669 _(b'record parent information for the precursor'),
2704 ),
2670 ),
2705 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2671 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2706 (
2672 (
2707 b'',
2673 b'',
2708 b'exclusive',
2674 b'exclusive',
2709 False,
2675 False,
2710 _(b'restrict display to markers only relevant to REV'),
2676 _(b'restrict display to markers only relevant to REV'),
2711 ),
2677 ),
2712 (b'', b'index', False, _(b'display index of the marker')),
2678 (b'', b'index', False, _(b'display index of the marker')),
2713 (b'', b'delete', [], _(b'delete markers specified by indices')),
2679 (b'', b'delete', [], _(b'delete markers specified by indices')),
2714 ]
2680 ]
2715 + cmdutil.commitopts2
2681 + cmdutil.commitopts2
2716 + cmdutil.formatteropts,
2682 + cmdutil.formatteropts,
2717 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2683 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2718 )
2684 )
2719 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2685 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2720 """create arbitrary obsolete marker
2686 """create arbitrary obsolete marker
2721
2687
2722 With no arguments, displays the list of obsolescence markers."""
2688 With no arguments, displays the list of obsolescence markers."""
2723
2689
2724 opts = pycompat.byteskwargs(opts)
2690 opts = pycompat.byteskwargs(opts)
2725
2691
2726 def parsenodeid(s):
2692 def parsenodeid(s):
2727 try:
2693 try:
2728 # We do not use revsingle/revrange functions here to accept
2694 # We do not use revsingle/revrange functions here to accept
2729 # arbitrary node identifiers, possibly not present in the
2695 # arbitrary node identifiers, possibly not present in the
2730 # local repository.
2696 # local repository.
2731 n = bin(s)
2697 n = bin(s)
2732 if len(n) != repo.nodeconstants.nodelen:
2698 if len(n) != repo.nodeconstants.nodelen:
2733 raise ValueError
2699 raise ValueError
2734 return n
2700 return n
2735 except ValueError:
2701 except ValueError:
2736 raise error.InputError(
2702 raise error.InputError(
2737 b'changeset references must be full hexadecimal '
2703 b'changeset references must be full hexadecimal '
2738 b'node identifiers'
2704 b'node identifiers'
2739 )
2705 )
2740
2706
2741 if opts.get(b'delete'):
2707 if opts.get(b'delete'):
2742 indices = []
2708 indices = []
2743 for v in opts.get(b'delete'):
2709 for v in opts.get(b'delete'):
2744 try:
2710 try:
2745 indices.append(int(v))
2711 indices.append(int(v))
2746 except ValueError:
2712 except ValueError:
2747 raise error.InputError(
2713 raise error.InputError(
2748 _(b'invalid index value: %r') % v,
2714 _(b'invalid index value: %r') % v,
2749 hint=_(b'use integers for indices'),
2715 hint=_(b'use integers for indices'),
2750 )
2716 )
2751
2717
2752 if repo.currenttransaction():
2718 if repo.currenttransaction():
2753 raise error.Abort(
2719 raise error.Abort(
2754 _(b'cannot delete obsmarkers in the middle of transaction.')
2720 _(b'cannot delete obsmarkers in the middle of transaction.')
2755 )
2721 )
2756
2722
2757 with repo.lock():
2723 with repo.lock():
2758 n = repair.deleteobsmarkers(repo.obsstore, indices)
2724 n = repair.deleteobsmarkers(repo.obsstore, indices)
2759 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2725 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2760
2726
2761 return
2727 return
2762
2728
2763 if precursor is not None:
2729 if precursor is not None:
2764 if opts[b'rev']:
2730 if opts[b'rev']:
2765 raise error.InputError(
2731 raise error.InputError(
2766 b'cannot select revision when creating marker'
2732 b'cannot select revision when creating marker'
2767 )
2733 )
2768 metadata = {}
2734 metadata = {}
2769 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2735 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2770 succs = tuple(parsenodeid(succ) for succ in successors)
2736 succs = tuple(parsenodeid(succ) for succ in successors)
2771 l = repo.lock()
2737 l = repo.lock()
2772 try:
2738 try:
2773 tr = repo.transaction(b'debugobsolete')
2739 tr = repo.transaction(b'debugobsolete')
2774 try:
2740 try:
2775 date = opts.get(b'date')
2741 date = opts.get(b'date')
2776 if date:
2742 if date:
2777 date = dateutil.parsedate(date)
2743 date = dateutil.parsedate(date)
2778 else:
2744 else:
2779 date = None
2745 date = None
2780 prec = parsenodeid(precursor)
2746 prec = parsenodeid(precursor)
2781 parents = None
2747 parents = None
2782 if opts[b'record_parents']:
2748 if opts[b'record_parents']:
2783 if prec not in repo.unfiltered():
2749 if prec not in repo.unfiltered():
2784 raise error.Abort(
2750 raise error.Abort(
2785 b'cannot used --record-parents on '
2751 b'cannot used --record-parents on '
2786 b'unknown changesets'
2752 b'unknown changesets'
2787 )
2753 )
2788 parents = repo.unfiltered()[prec].parents()
2754 parents = repo.unfiltered()[prec].parents()
2789 parents = tuple(p.node() for p in parents)
2755 parents = tuple(p.node() for p in parents)
2790 repo.obsstore.create(
2756 repo.obsstore.create(
2791 tr,
2757 tr,
2792 prec,
2758 prec,
2793 succs,
2759 succs,
2794 opts[b'flags'],
2760 opts[b'flags'],
2795 parents=parents,
2761 parents=parents,
2796 date=date,
2762 date=date,
2797 metadata=metadata,
2763 metadata=metadata,
2798 ui=ui,
2764 ui=ui,
2799 )
2765 )
2800 tr.close()
2766 tr.close()
2801 except ValueError as exc:
2767 except ValueError as exc:
2802 raise error.Abort(
2768 raise error.Abort(
2803 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2769 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2804 )
2770 )
2805 finally:
2771 finally:
2806 tr.release()
2772 tr.release()
2807 finally:
2773 finally:
2808 l.release()
2774 l.release()
2809 else:
2775 else:
2810 if opts[b'rev']:
2776 if opts[b'rev']:
2811 revs = logcmdutil.revrange(repo, opts[b'rev'])
2777 revs = logcmdutil.revrange(repo, opts[b'rev'])
2812 nodes = [repo[r].node() for r in revs]
2778 nodes = [repo[r].node() for r in revs]
2813 markers = list(
2779 markers = list(
2814 obsutil.getmarkers(
2780 obsutil.getmarkers(
2815 repo, nodes=nodes, exclusive=opts[b'exclusive']
2781 repo, nodes=nodes, exclusive=opts[b'exclusive']
2816 )
2782 )
2817 )
2783 )
2818 markers.sort(key=lambda x: x._data)
2784 markers.sort(key=lambda x: x._data)
2819 else:
2785 else:
2820 markers = obsutil.getmarkers(repo)
2786 markers = obsutil.getmarkers(repo)
2821
2787
2822 markerstoiter = markers
2788 markerstoiter = markers
2823 isrelevant = lambda m: True
2789 isrelevant = lambda m: True
2824 if opts.get(b'rev') and opts.get(b'index'):
2790 if opts.get(b'rev') and opts.get(b'index'):
2825 markerstoiter = obsutil.getmarkers(repo)
2791 markerstoiter = obsutil.getmarkers(repo)
2826 markerset = set(markers)
2792 markerset = set(markers)
2827 isrelevant = lambda m: m in markerset
2793 isrelevant = lambda m: m in markerset
2828
2794
2829 fm = ui.formatter(b'debugobsolete', opts)
2795 fm = ui.formatter(b'debugobsolete', opts)
2830 for i, m in enumerate(markerstoiter):
2796 for i, m in enumerate(markerstoiter):
2831 if not isrelevant(m):
2797 if not isrelevant(m):
2832 # marker can be irrelevant when we're iterating over a set
2798 # marker can be irrelevant when we're iterating over a set
2833 # of markers (markerstoiter) which is bigger than the set
2799 # of markers (markerstoiter) which is bigger than the set
2834 # of markers we want to display (markers)
2800 # of markers we want to display (markers)
2835 # this can happen if both --index and --rev options are
2801 # this can happen if both --index and --rev options are
2836 # provided and thus we need to iterate over all of the markers
2802 # provided and thus we need to iterate over all of the markers
2837 # to get the correct indices, but only display the ones that
2803 # to get the correct indices, but only display the ones that
2838 # are relevant to --rev value
2804 # are relevant to --rev value
2839 continue
2805 continue
2840 fm.startitem()
2806 fm.startitem()
2841 ind = i if opts.get(b'index') else None
2807 ind = i if opts.get(b'index') else None
2842 cmdutil.showmarker(fm, m, index=ind)
2808 cmdutil.showmarker(fm, m, index=ind)
2843 fm.end()
2809 fm.end()
2844
2810
2845
2811
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""
    # Resolve the revision to inspect; default=None is forwarded to
    # revsingle when no -r/--rev was given.
    byte_opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byte_opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p1.
    copy_map = ctx.p1copies()
    for dst in copy_map:
        ui.write(b'%s -> %s\n' % (copy_map[dst], dst))
2858
2824
2859
2825
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Resolve the revision to inspect; default=None is forwarded to
    # revsingle when no -r/--rev was given.
    byte_opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byte_opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p2.
    copy_map = ctx.p2copies()
    for dst in copy_map:
        ui.write(b'%s -> %s\n' % (copy_map[dst], dst))
2872
2838
2873
2839
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs): completions for `path` limited to dirstate
        # entries whose state letter is in `acceptable`.
        dirstate = repo.dirstate
        # Normalize the spec to an absolute path so we can test whether it
        # lives under the repository root.
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec root-relative, matching dirstate keys.
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; on platforms with a different
        # separator we must translate in both directions.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the bound methods once; this loop runs over every dirstate
        # entry.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path segment: a match
                # containing a further separator completes as a directory.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state letters from the flags;
    # empty means "no filter" and is replaced by b'nmar' below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    # Directories are reported alongside files, in one sorted listing.
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2942
2908
2943
2909
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then print every detected copy as
    # "source -> destination", sorted for deterministic output.
    src_ctx = scmutil.revsingle(repo, rev1)
    dst_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(src_ctx, pats, opts)
    copy_map = copies.pathcopies(src_ctx, dst_ctx, matcher)
    for dst, src in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2957
2923
2958
2924
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always switched on here; the extra output
    # still only appears when --debug is in effect.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        def yesno(flag):
            # Localized yes/no rendering for the report below.
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(is_local))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
    finally:
        peer.close()
2982
2948
2983
2949
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool wins over everything else; it is injected as ui.forcemerge
    # for the duration of the configoverride below.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the other tool-selection inputs (HGMERGE, ui.merge) when
        # they are set, so verbose output explains the decision.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Unless --debug is set, suppress _picktool's chatter (errors
            # included) and print only the final "FILE = MERGETOOL" line.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3068
3034
3069
3035
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """
    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Update mode: attempt the old -> new transition for the key
            # and report the peer's answer.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            return not r
    finally:
        target.close()
3105
3071
3106
3072
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvecs of two revisions

    Prints both pvecs, their depths, and the delta/hamming-distance/
    distance/relation summary line.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously `rel` was left unbound when none of the comparisons
        # above held, crashing with a NameError at the final ui.write.
        # Report an unknown relation instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3133
3099
3134
3100
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None means "rebuild everything" for dirstate.rebuild.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files the manifest knows but the dirstate does not.
            manifestonly = manifestfiles - dirstatefiles
            # Files the dirstate tracks that the manifest lacks; those
            # marked "added" are deliberately left alone.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3182
3148
3183
3149
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # The heavy lifting lives in the repair module; --only-data restricts
    # the scan accordingly.
    byte_opts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, byte_opts.get(b"only_data"))
3200
3166
3201
3167
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    # Report, for every matched file, where its filelog says it was
    # renamed from (if anywhere).
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        rel = repo.pathto(path)
        renamed = fctx.filelog().renamed(fctx.filenode())
        if not renamed:
            ui.write(_(b"%s not renamed\n") % rel)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (rel, renamed[0], hex(renamed[1]))
            )
3221
3187
3222
3188
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3228
3194
3229
3195
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    byte_opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, b'debugrevlog', file_, byte_opts)

    # --dump prints the raw index data; the default is the statistics
    # report.
    if not byte_opts.get(b"dump"):
        revlog_debug.debug_revlog(ui, rlog)
    else:
        revlog_debug.dump(ui, rlog)
    return 0
3246
3212
3247
3213
3248 @command(
3214 @command(
3249 b'debugrevlogindex',
3215 b'debugrevlogindex',
3250 cmdutil.debugrevlogopts
3216 cmdutil.debugrevlogopts
3251 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3217 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3252 _(b'[-f FORMAT] -c|-m|FILE'),
3218 _(b'[-f FORMAT] -c|-m|FILE'),
3253 optionalrepo=True,
3219 optionalrepo=True,
3254 )
3220 )
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # --format selects one of two fixed index layouts; anything else aborts
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full 40-char hashes with --debug, short ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # peek at the first entry only to learn the rendered hash width
        idlen = len(shortfn(r.node(i)))
        break

    # print the column headers, padding nodeid/p1/p2 to the hash width
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # one output line per revision, matching the header layout above
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # damaged index entries still get a line, with null parents
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not hashes
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3361
3327
3362
3328
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the parsing pipeline: each stage transforms the tree produced by the
    # previous one; 'optimized' may be dropped below via --no-optimized
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # showalways: stages always printed; showchanged: printed only when the
    # tree actually changed at that stage
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, keeping every intermediate tree for later comparison
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and the optimized tree and compare
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # the results differ: print a unified-diff-style comparison of the
        # two revision lists, then signal failure with exit code 1
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # normal mode: evaluate the (possibly optimized) final tree
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3494
3460
3495
3461
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # the ssh-over-stdio transport is the only mode implemented so far
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # the two logging destinations are mutually exclusive
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    fd_spec = opts[b'logiofd']
    if fd_spec:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(fd_spec)
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3544
3510
3545
3511
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # rev2 defaults to the null revision, i.e. "no second parent"
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # only the dirstate parents are rewritten; the working copy is untouched
    with repo.wlock():
        repo.setparents(node1, node2)
3573
3539
3574
3540
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir the single positional argument is the revision
    wants_storage = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if wants_storage:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # unwrap filelog-style objects down to the underlying revlog
    storage = getattr(storage, '_revlog', storage)
    try:
        sidedata = storage.sidedata(storage.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3601
3567
3602
3568
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # resolve the address to connect to, filling in the scheme's default port
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12,
    # so build an explicit SSLContext instead.  Certificate verification is
    # intentionally disabled: we only want to fetch the peer's certificate
    # chain here, not to validate it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first check without building; only trigger Windows Update if needed
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3674
3640
3675
3641
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # all strip backups live under .hg/strip-backup/*.hg; newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # neutralize bundle/force so the shared incoming machinery below does not
    # pick up unrelated values
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # show up to `limit` changesets from one bundle, honoring the
        # standard log options --newest-first and --no-merges
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # nothing to do if the requested changeset is already present
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # the bundle references a parent we do not have; skip it
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the noisy "comparing with ..." output of getremotechanges
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # unbundle the first backup that contains the wanted node,
                # then stop scanning
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # listing mode: header is the bundle's mtime; --verbose adds
                # the bundle path, otherwise a compact one-line template
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # always drop the temporary bundle repository
            cleanupfn()
3816
3782
3817
3783
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # dump the subrepository state (path, source, pinned revision) recorded
    # in the given revision, sorted by subrepo path
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source   %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3829
3795
3830
3796
3831 @command(b'debugshell', optionalrepo=True)
3797 @command(b'debugshell', optionalrepo=True)
3832 def debugshell(ui, repo):
3798 def debugshell(ui, repo):
3833 """run an interactive Python interpreter
3799 """run an interactive Python interpreter
3834
3800
3835 The local namespace is provided with a reference to the ui and
3801 The local namespace is provided with a reference to the ui and
3836 the repo instance (if available).
3802 the repo instance (if available).
3837 """
3803 """
3838 import code
3804 import code
3839
3805
3840 imported_objects = {
3806 imported_objects = {
3841 'ui': ui,
3807 'ui': ui,
3842 'repo': repo,
3808 'repo': repo,
3843 }
3809 }
3844
3810
3845 code.interact(local=imported_objects)
3811 code.interact(local=imported_objects)
3846
3812
3847
3813
3848 @command(
3814 @command(
3849 b'debugsuccessorssets',
3815 b'debugsuccessorssets',
3850 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3816 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3851 _(b'[REV]'),
3817 _(b'[REV]'),
3852 )
3818 )
3853 def debugsuccessorssets(ui, repo, *revs, **opts):
3819 def debugsuccessorssets(ui, repo, *revs, **opts):
3854 """show set of successors for revision
3820 """show set of successors for revision
3855
3821
3856 A successors set of changeset A is a consistent group of revisions that
3822 A successors set of changeset A is a consistent group of revisions that
3857 succeed A. It contains non-obsolete changesets only unless closests
3823 succeed A. It contains non-obsolete changesets only unless closests
3858 successors set is set.
3824 successors set is set.
3859
3825
3860 In most cases a changeset A has a single successors set containing a single
3826 In most cases a changeset A has a single successors set containing a single
3861 successor (changeset A replaced by A').
3827 successor (changeset A replaced by A').
3862
3828
3863 A changeset that is made obsolete with no successors are called "pruned".
3829 A changeset that is made obsolete with no successors are called "pruned".
3864 Such changesets have no successors sets at all.
3830 Such changesets have no successors sets at all.
3865
3831
3866 A changeset that has been "split" will have a successors set containing
3832 A changeset that has been "split" will have a successors set containing
3867 more than one successor.
3833 more than one successor.
3868
3834
3869 A changeset that has been rewritten in multiple different ways is called
3835 A changeset that has been rewritten in multiple different ways is called
3870 "divergent". Such changesets have multiple successor sets (each of which
3836 "divergent". Such changesets have multiple successor sets (each of which
3871 may also be split, i.e. have multiple successors).
3837 may also be split, i.e. have multiple successors).
3872
3838
3873 Results are displayed as follows::
3839 Results are displayed as follows::
3874
3840
3875 <rev1>
3841 <rev1>
3876 <successors-1A>
3842 <successors-1A>
3877 <rev2>
3843 <rev2>
3878 <successors-2A>
3844 <successors-2A>
3879 <successors-2B1> <successors-2B2> <successors-2B3>
3845 <successors-2B1> <successors-2B2> <successors-2B3>
3880
3846
3881 Here rev2 has two possible (i.e. divergent) successors sets. The first
3847 Here rev2 has two possible (i.e. divergent) successors sets. The first
3882 holds one element, whereas the second holds three (i.e. the changeset has
3848 holds one element, whereas the second holds three (i.e. the changeset has
3883 been split).
3849 been split).
3884 """
3850 """
3885 # passed to successorssets caching computation from one call to another
3851 # passed to successorssets caching computation from one call to another
3886 cache = {}
3852 cache = {}
3887 ctx2str = bytes
3853 ctx2str = bytes
3888 node2str = short
3854 node2str = short
3889 for rev in logcmdutil.revrange(repo, revs):
3855 for rev in logcmdutil.revrange(repo, revs):
3890 ctx = repo[rev]
3856 ctx = repo[rev]
3891 ui.write(b'%s\n' % ctx2str(ctx))
3857 ui.write(b'%s\n' % ctx2str(ctx))
3892 for succsset in obsutil.successorssets(
3858 for succsset in obsutil.successorssets(
3893 repo, ctx.node(), closest=opts['closest'], cache=cache
3859 repo, ctx.node(), closest=opts['closest'], cache=cache
3894 ):
3860 ):
3895 if succsset:
3861 if succsset:
3896 ui.write(b' ')
3862 ui.write(b' ')
3897 ui.write(node2str(succsset[0]))
3863 ui.write(node2str(succsset[0]))
3898 for node in succsset[1:]:
3864 for node in succsset[1:]:
3899 ui.write(b' ')
3865 ui.write(b' ')
3900 ui.write(node2str(node))
3866 ui.write(node2str(node))
3901 ui.write(b'\n')
3867 ui.write(b'\n')
3902
3868
3903
3869
3904 @command(b'debugtagscache', [])
3870 @command(b'debugtagscache', [])
3905 def debugtagscache(ui, repo):
3871 def debugtagscache(ui, repo):
3906 """display the contents of .hg/cache/hgtagsfnodes1"""
3872 """display the contents of .hg/cache/hgtagsfnodes1"""
3907 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3873 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3908 flog = repo.file(b'.hgtags')
3874 flog = repo.file(b'.hgtags')
3909 for r in repo:
3875 for r in repo:
3910 node = repo[r].node()
3876 node = repo[r].node()
3911 tagsnode = cache.getfnode(node, computemissing=False)
3877 tagsnode = cache.getfnode(node, computemissing=False)
3912 if tagsnode:
3878 if tagsnode:
3913 tagsnodedisplay = hex(tagsnode)
3879 tagsnodedisplay = hex(tagsnode)
3914 if not flog.hasnode(tagsnode):
3880 if not flog.hasnode(tagsnode):
3915 tagsnodedisplay += b' (unknown node)'
3881 tagsnodedisplay += b' (unknown node)'
3916 elif tagsnode is None:
3882 elif tagsnode is None:
3917 tagsnodedisplay = b'missing'
3883 tagsnodedisplay = b'missing'
3918 else:
3884 else:
3919 tagsnodedisplay = b'invalid'
3885 tagsnodedisplay = b'invalid'
3920
3886
3921 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3887 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3922
3888
3923
3889
3924 @command(
3890 @command(
3925 b'debugtemplate',
3891 b'debugtemplate',
3926 [
3892 [
3927 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3893 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3928 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3894 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3929 ],
3895 ],
3930 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3896 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3931 optionalrepo=True,
3897 optionalrepo=True,
3932 )
3898 )
3933 def debugtemplate(ui, repo, tmpl, **opts):
3899 def debugtemplate(ui, repo, tmpl, **opts):
3934 """parse and apply a template
3900 """parse and apply a template
3935
3901
3936 If -r/--rev is given, the template is processed as a log template and
3902 If -r/--rev is given, the template is processed as a log template and
3937 applied to the given changesets. Otherwise, it is processed as a generic
3903 applied to the given changesets. Otherwise, it is processed as a generic
3938 template.
3904 template.
3939
3905
3940 Use --verbose to print the parsed tree.
3906 Use --verbose to print the parsed tree.
3941 """
3907 """
3942 revs = None
3908 revs = None
3943 if opts['rev']:
3909 if opts['rev']:
3944 if repo is None:
3910 if repo is None:
3945 raise error.RepoError(
3911 raise error.RepoError(
3946 _(b'there is no Mercurial repository here (.hg not found)')
3912 _(b'there is no Mercurial repository here (.hg not found)')
3947 )
3913 )
3948 revs = logcmdutil.revrange(repo, opts['rev'])
3914 revs = logcmdutil.revrange(repo, opts['rev'])
3949
3915
3950 props = {}
3916 props = {}
3951 for d in opts['define']:
3917 for d in opts['define']:
3952 try:
3918 try:
3953 k, v = (e.strip() for e in d.split(b'=', 1))
3919 k, v = (e.strip() for e in d.split(b'=', 1))
3954 if not k or k == b'ui':
3920 if not k or k == b'ui':
3955 raise ValueError
3921 raise ValueError
3956 props[k] = v
3922 props[k] = v
3957 except ValueError:
3923 except ValueError:
3958 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3924 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3959
3925
3960 if ui.verbose:
3926 if ui.verbose:
3961 aliases = ui.configitems(b'templatealias')
3927 aliases = ui.configitems(b'templatealias')
3962 tree = templater.parse(tmpl)
3928 tree = templater.parse(tmpl)
3963 ui.note(templater.prettyformat(tree), b'\n')
3929 ui.note(templater.prettyformat(tree), b'\n')
3964 newtree = templater.expandaliases(tree, aliases)
3930 newtree = templater.expandaliases(tree, aliases)
3965 if newtree != tree:
3931 if newtree != tree:
3966 ui.notenoi18n(
3932 ui.notenoi18n(
3967 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3933 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3968 )
3934 )
3969
3935
3970 if revs is None:
3936 if revs is None:
3971 tres = formatter.templateresources(ui, repo)
3937 tres = formatter.templateresources(ui, repo)
3972 t = formatter.maketemplater(ui, tmpl, resources=tres)
3938 t = formatter.maketemplater(ui, tmpl, resources=tres)
3973 if ui.verbose:
3939 if ui.verbose:
3974 kwds, funcs = t.symbolsuseddefault()
3940 kwds, funcs = t.symbolsuseddefault()
3975 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3941 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3976 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3942 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3977 ui.write(t.renderdefault(props))
3943 ui.write(t.renderdefault(props))
3978 else:
3944 else:
3979 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3945 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3980 if ui.verbose:
3946 if ui.verbose:
3981 kwds, funcs = displayer.t.symbolsuseddefault()
3947 kwds, funcs = displayer.t.symbolsuseddefault()
3982 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3948 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3983 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3949 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3984 for r in revs:
3950 for r in revs:
3985 displayer.show(repo[r], **pycompat.strkwargs(props))
3951 displayer.show(repo[r], **pycompat.strkwargs(props))
3986 displayer.close()
3952 displayer.close()
3987
3953
3988
3954
3989 @command(
3955 @command(
3990 b'debuguigetpass',
3956 b'debuguigetpass',
3991 [
3957 [
3992 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3958 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3993 ],
3959 ],
3994 _(b'[-p TEXT]'),
3960 _(b'[-p TEXT]'),
3995 norepo=True,
3961 norepo=True,
3996 )
3962 )
3997 def debuguigetpass(ui, prompt=b''):
3963 def debuguigetpass(ui, prompt=b''):
3998 """show prompt to type password"""
3964 """show prompt to type password"""
3999 r = ui.getpass(prompt)
3965 r = ui.getpass(prompt)
4000 if r is None:
3966 if r is None:
4001 r = b"<default response>"
3967 r = b"<default response>"
4002 ui.writenoi18n(b'response: %s\n' % r)
3968 ui.writenoi18n(b'response: %s\n' % r)
4003
3969
4004
3970
4005 @command(
3971 @command(
4006 b'debuguiprompt',
3972 b'debuguiprompt',
4007 [
3973 [
4008 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3974 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4009 ],
3975 ],
4010 _(b'[-p TEXT]'),
3976 _(b'[-p TEXT]'),
4011 norepo=True,
3977 norepo=True,
4012 )
3978 )
4013 def debuguiprompt(ui, prompt=b''):
3979 def debuguiprompt(ui, prompt=b''):
4014 """show plain prompt"""
3980 """show plain prompt"""
4015 r = ui.prompt(prompt)
3981 r = ui.prompt(prompt)
4016 ui.writenoi18n(b'response: %s\n' % r)
3982 ui.writenoi18n(b'response: %s\n' % r)
4017
3983
4018
3984
4019 @command(b'debugupdatecaches', [])
3985 @command(b'debugupdatecaches', [])
4020 def debugupdatecaches(ui, repo, *pats, **opts):
3986 def debugupdatecaches(ui, repo, *pats, **opts):
4021 """warm all known caches in the repository"""
3987 """warm all known caches in the repository"""
4022 with repo.wlock(), repo.lock():
3988 with repo.wlock(), repo.lock():
4023 repo.updatecaches(caches=repository.CACHES_ALL)
3989 repo.updatecaches(caches=repository.CACHES_ALL)
4024
3990
4025
3991
4026 @command(
3992 @command(
4027 b'debugupgraderepo',
3993 b'debugupgraderepo',
4028 [
3994 [
4029 (
3995 (
4030 b'o',
3996 b'o',
4031 b'optimize',
3997 b'optimize',
4032 [],
3998 [],
4033 _(b'extra optimization to perform'),
3999 _(b'extra optimization to perform'),
4034 _(b'NAME'),
4000 _(b'NAME'),
4035 ),
4001 ),
4036 (b'', b'run', False, _(b'performs an upgrade')),
4002 (b'', b'run', False, _(b'performs an upgrade')),
4037 (b'', b'backup', True, _(b'keep the old repository content around')),
4003 (b'', b'backup', True, _(b'keep the old repository content around')),
4038 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4004 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4039 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4005 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4040 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4006 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4041 ],
4007 ],
4042 )
4008 )
4043 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4009 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4044 """upgrade a repository to use different features
4010 """upgrade a repository to use different features
4045
4011
4046 If no arguments are specified, the repository is evaluated for upgrade
4012 If no arguments are specified, the repository is evaluated for upgrade
4047 and a list of problems and potential optimizations is printed.
4013 and a list of problems and potential optimizations is printed.
4048
4014
4049 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4015 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4050 can be influenced via additional arguments. More details will be provided
4016 can be influenced via additional arguments. More details will be provided
4051 by the command output when run without ``--run``.
4017 by the command output when run without ``--run``.
4052
4018
4053 During the upgrade, the repository will be locked and no writes will be
4019 During the upgrade, the repository will be locked and no writes will be
4054 allowed.
4020 allowed.
4055
4021
4056 At the end of the upgrade, the repository may not be readable while new
4022 At the end of the upgrade, the repository may not be readable while new
4057 repository data is swapped in. This window will be as long as it takes to
4023 repository data is swapped in. This window will be as long as it takes to
4058 rename some directories inside the ``.hg`` directory. On most machines, this
4024 rename some directories inside the ``.hg`` directory. On most machines, this
4059 should complete almost instantaneously and the chances of a consumer being
4025 should complete almost instantaneously and the chances of a consumer being
4060 unable to access the repository should be low.
4026 unable to access the repository should be low.
4061
4027
4062 By default, all revlogs will be upgraded. You can restrict this using flags
4028 By default, all revlogs will be upgraded. You can restrict this using flags
4063 such as `--manifest`:
4029 such as `--manifest`:
4064
4030
4065 * `--manifest`: only optimize the manifest
4031 * `--manifest`: only optimize the manifest
4066 * `--no-manifest`: optimize all revlog but the manifest
4032 * `--no-manifest`: optimize all revlog but the manifest
4067 * `--changelog`: optimize the changelog only
4033 * `--changelog`: optimize the changelog only
4068 * `--no-changelog --no-manifest`: optimize filelogs only
4034 * `--no-changelog --no-manifest`: optimize filelogs only
4069 * `--filelogs`: optimize the filelogs only
4035 * `--filelogs`: optimize the filelogs only
4070 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4036 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4071 """
4037 """
4072 return upgrade.upgraderepo(
4038 return upgrade.upgraderepo(
4073 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4039 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4074 )
4040 )
4075
4041
4076
4042
4077 @command(
4043 @command(
4078 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4044 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4079 )
4045 )
4080 def debugwalk(ui, repo, *pats, **opts):
4046 def debugwalk(ui, repo, *pats, **opts):
4081 """show how files match on given patterns"""
4047 """show how files match on given patterns"""
4082 opts = pycompat.byteskwargs(opts)
4048 opts = pycompat.byteskwargs(opts)
4083 m = scmutil.match(repo[None], pats, opts)
4049 m = scmutil.match(repo[None], pats, opts)
4084 if ui.verbose:
4050 if ui.verbose:
4085 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4051 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4086 items = list(repo[None].walk(m))
4052 items = list(repo[None].walk(m))
4087 if not items:
4053 if not items:
4088 return
4054 return
4089 f = lambda fn: fn
4055 f = lambda fn: fn
4090 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4056 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4091 f = lambda fn: util.normpath(fn)
4057 f = lambda fn: util.normpath(fn)
4092 fmt = b'f %%-%ds %%-%ds %%s' % (
4058 fmt = b'f %%-%ds %%-%ds %%s' % (
4093 max([len(abs) for abs in items]),
4059 max([len(abs) for abs in items]),
4094 max([len(repo.pathto(abs)) for abs in items]),
4060 max([len(repo.pathto(abs)) for abs in items]),
4095 )
4061 )
4096 for abs in items:
4062 for abs in items:
4097 line = fmt % (
4063 line = fmt % (
4098 abs,
4064 abs,
4099 f(repo.pathto(abs)),
4065 f(repo.pathto(abs)),
4100 m.exact(abs) and b'exact' or b'',
4066 m.exact(abs) and b'exact' or b'',
4101 )
4067 )
4102 ui.write(b"%s\n" % line.rstrip())
4068 ui.write(b"%s\n" % line.rstrip())
4103
4069
4104
4070
4105 @command(b'debugwhyunstable', [], _(b'REV'))
4071 @command(b'debugwhyunstable', [], _(b'REV'))
4106 def debugwhyunstable(ui, repo, rev):
4072 def debugwhyunstable(ui, repo, rev):
4107 """explain instabilities of a changeset"""
4073 """explain instabilities of a changeset"""
4108 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4074 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4109 dnodes = b''
4075 dnodes = b''
4110 if entry.get(b'divergentnodes'):
4076 if entry.get(b'divergentnodes'):
4111 dnodes = (
4077 dnodes = (
4112 b' '.join(
4078 b' '.join(
4113 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4079 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4114 for ctx in entry[b'divergentnodes']
4080 for ctx in entry[b'divergentnodes']
4115 )
4081 )
4116 + b' '
4082 + b' '
4117 )
4083 )
4118 ui.write(
4084 ui.write(
4119 b'%s: %s%s %s\n'
4085 b'%s: %s%s %s\n'
4120 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4086 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4121 )
4087 )
4122
4088
4123
4089
4124 @command(
4090 @command(
4125 b'debugwireargs',
4091 b'debugwireargs',
4126 [
4092 [
4127 (b'', b'three', b'', b'three'),
4093 (b'', b'three', b'', b'three'),
4128 (b'', b'four', b'', b'four'),
4094 (b'', b'four', b'', b'four'),
4129 (b'', b'five', b'', b'five'),
4095 (b'', b'five', b'', b'five'),
4130 ]
4096 ]
4131 + cmdutil.remoteopts,
4097 + cmdutil.remoteopts,
4132 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4098 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4133 norepo=True,
4099 norepo=True,
4134 )
4100 )
4135 def debugwireargs(ui, repopath, *vals, **opts):
4101 def debugwireargs(ui, repopath, *vals, **opts):
4136 opts = pycompat.byteskwargs(opts)
4102 opts = pycompat.byteskwargs(opts)
4137 repo = hg.peer(ui, opts, repopath)
4103 repo = hg.peer(ui, opts, repopath)
4138 try:
4104 try:
4139 for opt in cmdutil.remoteopts:
4105 for opt in cmdutil.remoteopts:
4140 del opts[opt[1]]
4106 del opts[opt[1]]
4141 args = {}
4107 args = {}
4142 for k, v in opts.items():
4108 for k, v in opts.items():
4143 if v:
4109 if v:
4144 args[k] = v
4110 args[k] = v
4145 args = pycompat.strkwargs(args)
4111 args = pycompat.strkwargs(args)
4146 # run twice to check that we don't mess up the stream for the next command
4112 # run twice to check that we don't mess up the stream for the next command
4147 res1 = repo.debugwireargs(*vals, **args)
4113 res1 = repo.debugwireargs(*vals, **args)
4148 res2 = repo.debugwireargs(*vals, **args)
4114 res2 = repo.debugwireargs(*vals, **args)
4149 ui.write(b"%s\n" % res1)
4115 ui.write(b"%s\n" % res1)
4150 if res1 != res2:
4116 if res1 != res2:
4151 ui.warn(b"%s\n" % res2)
4117 ui.warn(b"%s\n" % res2)
4152 finally:
4118 finally:
4153 repo.close()
4119 repo.close()
4154
4120
4155
4121
4156 def _parsewirelangblocks(fh):
4122 def _parsewirelangblocks(fh):
4157 activeaction = None
4123 activeaction = None
4158 blocklines = []
4124 blocklines = []
4159 lastindent = 0
4125 lastindent = 0
4160
4126
4161 for line in fh:
4127 for line in fh:
4162 line = line.rstrip()
4128 line = line.rstrip()
4163 if not line:
4129 if not line:
4164 continue
4130 continue
4165
4131
4166 if line.startswith(b'#'):
4132 if line.startswith(b'#'):
4167 continue
4133 continue
4168
4134
4169 if not line.startswith(b' '):
4135 if not line.startswith(b' '):
4170 # New block. Flush previous one.
4136 # New block. Flush previous one.
4171 if activeaction:
4137 if activeaction:
4172 yield activeaction, blocklines
4138 yield activeaction, blocklines
4173
4139
4174 activeaction = line
4140 activeaction = line
4175 blocklines = []
4141 blocklines = []
4176 lastindent = 0
4142 lastindent = 0
4177 continue
4143 continue
4178
4144
4179 # Else we start with an indent.
4145 # Else we start with an indent.
4180
4146
4181 if not activeaction:
4147 if not activeaction:
4182 raise error.Abort(_(b'indented line outside of block'))
4148 raise error.Abort(_(b'indented line outside of block'))
4183
4149
4184 indent = len(line) - len(line.lstrip())
4150 indent = len(line) - len(line.lstrip())
4185
4151
4186 # If this line is indented more than the last line, concatenate it.
4152 # If this line is indented more than the last line, concatenate it.
4187 if indent > lastindent and blocklines:
4153 if indent > lastindent and blocklines:
4188 blocklines[-1] += line.lstrip()
4154 blocklines[-1] += line.lstrip()
4189 else:
4155 else:
4190 blocklines.append(line)
4156 blocklines.append(line)
4191 lastindent = indent
4157 lastindent = indent
4192
4158
4193 # Flush last block.
4159 # Flush last block.
4194 if activeaction:
4160 if activeaction:
4195 yield activeaction, blocklines
4161 yield activeaction, blocklines
4196
4162
4197
4163
4198 @command(
4164 @command(
4199 b'debugwireproto',
4165 b'debugwireproto',
4200 [
4166 [
4201 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4167 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4202 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4168 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4203 (
4169 (
4204 b'',
4170 b'',
4205 b'noreadstderr',
4171 b'noreadstderr',
4206 False,
4172 False,
4207 _(b'do not read from stderr of the remote'),
4173 _(b'do not read from stderr of the remote'),
4208 ),
4174 ),
4209 (
4175 (
4210 b'',
4176 b'',
4211 b'nologhandshake',
4177 b'nologhandshake',
4212 False,
4178 False,
4213 _(b'do not log I/O related to the peer handshake'),
4179 _(b'do not log I/O related to the peer handshake'),
4214 ),
4180 ),
4215 ]
4181 ]
4216 + cmdutil.remoteopts,
4182 + cmdutil.remoteopts,
4217 _(b'[PATH]'),
4183 _(b'[PATH]'),
4218 optionalrepo=True,
4184 optionalrepo=True,
4219 )
4185 )
4220 def debugwireproto(ui, repo, path=None, **opts):
4186 def debugwireproto(ui, repo, path=None, **opts):
4221 """send wire protocol commands to a server
4187 """send wire protocol commands to a server
4222
4188
4223 This command can be used to issue wire protocol commands to remote
4189 This command can be used to issue wire protocol commands to remote
4224 peers and to debug the raw data being exchanged.
4190 peers and to debug the raw data being exchanged.
4225
4191
4226 ``--localssh`` will start an SSH server against the current repository
4192 ``--localssh`` will start an SSH server against the current repository
4227 and connect to that. By default, the connection will perform a handshake
4193 and connect to that. By default, the connection will perform a handshake
4228 and establish an appropriate peer instance.
4194 and establish an appropriate peer instance.
4229
4195
4230 ``--peer`` can be used to bypass the handshake protocol and construct a
4196 ``--peer`` can be used to bypass the handshake protocol and construct a
4231 peer instance using the specified class type. Valid values are ``raw``,
4197 peer instance using the specified class type. Valid values are ``raw``,
4232 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4198 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4233 don't support higher-level command actions.
4199 don't support higher-level command actions.
4234
4200
4235 ``--noreadstderr`` can be used to disable automatic reading from stderr
4201 ``--noreadstderr`` can be used to disable automatic reading from stderr
4236 of the peer (for SSH connections only). Disabling automatic reading of
4202 of the peer (for SSH connections only). Disabling automatic reading of
4237 stderr is useful for making output more deterministic.
4203 stderr is useful for making output more deterministic.
4238
4204
4239 Commands are issued via a mini language which is specified via stdin.
4205 Commands are issued via a mini language which is specified via stdin.
4240 The language consists of individual actions to perform. An action is
4206 The language consists of individual actions to perform. An action is
4241 defined by a block. A block is defined as a line with no leading
4207 defined by a block. A block is defined as a line with no leading
4242 space followed by 0 or more lines with leading space. Blocks are
4208 space followed by 0 or more lines with leading space. Blocks are
4243 effectively a high-level command with additional metadata.
4209 effectively a high-level command with additional metadata.
4244
4210
4245 Lines beginning with ``#`` are ignored.
4211 Lines beginning with ``#`` are ignored.
4246
4212
4247 The following sections denote available actions.
4213 The following sections denote available actions.
4248
4214
4249 raw
4215 raw
4250 ---
4216 ---
4251
4217
4252 Send raw data to the server.
4218 Send raw data to the server.
4253
4219
4254 The block payload contains the raw data to send as one atomic send
4220 The block payload contains the raw data to send as one atomic send
4255 operation. The data may not actually be delivered in a single system
4221 operation. The data may not actually be delivered in a single system
4256 call: it depends on the abilities of the transport being used.
4222 call: it depends on the abilities of the transport being used.
4257
4223
4258 Each line in the block is de-indented and concatenated. Then, that
4224 Each line in the block is de-indented and concatenated. Then, that
4259 value is evaluated as a Python b'' literal. This allows the use of
4225 value is evaluated as a Python b'' literal. This allows the use of
4260 backslash escaping, etc.
4226 backslash escaping, etc.
4261
4227
4262 raw+
4228 raw+
4263 ----
4229 ----
4264
4230
4265 Behaves like ``raw`` except flushes output afterwards.
4231 Behaves like ``raw`` except flushes output afterwards.
4266
4232
4267 command <X>
4233 command <X>
4268 -----------
4234 -----------
4269
4235
4270 Send a request to run a named command, whose name follows the ``command``
4236 Send a request to run a named command, whose name follows the ``command``
4271 string.
4237 string.
4272
4238
4273 Arguments to the command are defined as lines in this block. The format of
4239 Arguments to the command are defined as lines in this block. The format of
4274 each line is ``<key> <value>``. e.g.::
4240 each line is ``<key> <value>``. e.g.::
4275
4241
4276 command listkeys
4242 command listkeys
4277 namespace bookmarks
4243 namespace bookmarks
4278
4244
4279 If the value begins with ``eval:``, it will be interpreted as a Python
4245 If the value begins with ``eval:``, it will be interpreted as a Python
4280 literal expression. Otherwise values are interpreted as Python b'' literals.
4246 literal expression. Otherwise values are interpreted as Python b'' literals.
4281 This allows sending complex types and encoding special byte sequences via
4247 This allows sending complex types and encoding special byte sequences via
4282 backslash escaping.
4248 backslash escaping.
4283
4249
4284 The following arguments have special meaning:
4250 The following arguments have special meaning:
4285
4251
4286 ``PUSHFILE``
4252 ``PUSHFILE``
4287 When defined, the *push* mechanism of the peer will be used instead
4253 When defined, the *push* mechanism of the peer will be used instead
4288 of the static request-response mechanism and the content of the
4254 of the static request-response mechanism and the content of the
4289 file specified in the value of this argument will be sent as the
4255 file specified in the value of this argument will be sent as the
4290 command payload.
4256 command payload.
4291
4257
4292 This can be used to submit a local bundle file to the remote.
4258 This can be used to submit a local bundle file to the remote.
4293
4259
4294 batchbegin
4260 batchbegin
4295 ----------
4261 ----------
4296
4262
4297 Instruct the peer to begin a batched send.
4263 Instruct the peer to begin a batched send.
4298
4264
4299 All ``command`` blocks are queued for execution until the next
4265 All ``command`` blocks are queued for execution until the next
4300 ``batchsubmit`` block.
4266 ``batchsubmit`` block.
4301
4267
4302 batchsubmit
4268 batchsubmit
4303 -----------
4269 -----------
4304
4270
4305 Submit previously queued ``command`` blocks as a batch request.
4271 Submit previously queued ``command`` blocks as a batch request.
4306
4272
4307 This action MUST be paired with a ``batchbegin`` action.
4273 This action MUST be paired with a ``batchbegin`` action.
4308
4274
4309 httprequest <method> <path>
4275 httprequest <method> <path>
4310 ---------------------------
4276 ---------------------------
4311
4277
4312 (HTTP peer only)
4278 (HTTP peer only)
4313
4279
4314 Send an HTTP request to the peer.
4280 Send an HTTP request to the peer.
4315
4281
4316 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4282 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4317
4283
4318 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4284 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4319 headers to add to the request. e.g. ``Accept: foo``.
4285 headers to add to the request. e.g. ``Accept: foo``.
4320
4286
4321 The following arguments are special:
4287 The following arguments are special:
4322
4288
4323 ``BODYFILE``
4289 ``BODYFILE``
4324 The content of the file defined as the value to this argument will be
4290 The content of the file defined as the value to this argument will be
4325 transferred verbatim as the HTTP request body.
4291 transferred verbatim as the HTTP request body.
4326
4292
4327 ``frame <type> <flags> <payload>``
4293 ``frame <type> <flags> <payload>``
4328 Send a unified protocol frame as part of the request body.
4294 Send a unified protocol frame as part of the request body.
4329
4295
4330 All frames will be collected and sent as the body to the HTTP
4296 All frames will be collected and sent as the body to the HTTP
4331 request.
4297 request.
4332
4298
4333 close
4299 close
4334 -----
4300 -----
4335
4301
4336 Close the connection to the server.
4302 Close the connection to the server.
4337
4303
4338 flush
4304 flush
4339 -----
4305 -----
4340
4306
4341 Flush data written to the server.
4307 Flush data written to the server.
4342
4308
4343 readavailable
4309 readavailable
4344 -------------
4310 -------------
4345
4311
4346 Close the write end of the connection and read all available data from
4312 Close the write end of the connection and read all available data from
4347 the server.
4313 the server.
4348
4314
4349 If the connection to the server encompasses multiple pipes, we poll both
4315 If the connection to the server encompasses multiple pipes, we poll both
4350 pipes and read available data.
4316 pipes and read available data.
4351
4317
4352 readline
4318 readline
4353 --------
4319 --------
4354
4320
4355 Read a line of output from the server. If there are multiple output
4321 Read a line of output from the server. If there are multiple output
4356 pipes, reads only the main pipe.
4322 pipes, reads only the main pipe.
4357
4323
4358 ereadline
4324 ereadline
4359 ---------
4325 ---------
4360
4326
4361 Like ``readline``, but read from the stderr pipe, if available.
4327 Like ``readline``, but read from the stderr pipe, if available.
4362
4328
4363 read <X>
4329 read <X>
4364 --------
4330 --------
4365
4331
4366 ``read()`` N bytes from the server's main output pipe.
4332 ``read()`` N bytes from the server's main output pipe.
4367
4333
4368 eread <X>
4334 eread <X>
4369 ---------
4335 ---------
4370
4336
4371 ``read()`` N bytes from the server's stderr pipe, if available.
4337 ``read()`` N bytes from the server's stderr pipe, if available.
4372
4338
4373 Specifying Unified Frame-Based Protocol Frames
4339 Specifying Unified Frame-Based Protocol Frames
4374 ----------------------------------------------
4340 ----------------------------------------------
4375
4341
4376 It is possible to emit a *Unified Frame-Based Protocol* by using special
4342 It is possible to emit a *Unified Frame-Based Protocol* by using special
4377 syntax.
4343 syntax.
4378
4344
4379 A frame is composed as a type, flags, and payload. These can be parsed
4345 A frame is composed as a type, flags, and payload. These can be parsed
4380 from a string of the form:
4346 from a string of the form:
4381
4347
4382 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4348 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4383
4349
4384 ``request-id`` and ``stream-id`` are integers defining the request and
4350 ``request-id`` and ``stream-id`` are integers defining the request and
4385 stream identifiers.
4351 stream identifiers.
4386
4352
4387 ``type`` can be an integer value for the frame type or the string name
4353 ``type`` can be an integer value for the frame type or the string name
4388 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4354 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4389 ``command-name``.
4355 ``command-name``.
4390
4356
4391 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4357 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4392 components. Each component (and there can be just one) can be an integer
4358 components. Each component (and there can be just one) can be an integer
4393 or a flag name for stream flags or frame flags, respectively. Values are
4359 or a flag name for stream flags or frame flags, respectively. Values are
4394 resolved to integers and then bitwise OR'd together.
4360 resolved to integers and then bitwise OR'd together.
4395
4361
4396 ``payload`` represents the raw frame payload. If it begins with
4362 ``payload`` represents the raw frame payload. If it begins with
4397 ``cbor:``, the following string is evaluated as Python code and the
4363 ``cbor:``, the following string is evaluated as Python code and the
4398 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4364 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4399 as a Python byte string literal.
4365 as a Python byte string literal.
4400 """
4366 """
4401 opts = pycompat.byteskwargs(opts)
4367 opts = pycompat.byteskwargs(opts)
4402
4368
4403 if opts[b'localssh'] and not repo:
4369 if opts[b'localssh'] and not repo:
4404 raise error.Abort(_(b'--localssh requires a repository'))
4370 raise error.Abort(_(b'--localssh requires a repository'))
4405
4371
4406 if opts[b'peer'] and opts[b'peer'] not in (
4372 if opts[b'peer'] and opts[b'peer'] not in (
4407 b'raw',
4373 b'raw',
4408 b'ssh1',
4374 b'ssh1',
4409 ):
4375 ):
4410 raise error.Abort(
4376 raise error.Abort(
4411 _(b'invalid value for --peer'),
4377 _(b'invalid value for --peer'),
4412 hint=_(b'valid values are "raw" and "ssh1"'),
4378 hint=_(b'valid values are "raw" and "ssh1"'),
4413 )
4379 )
4414
4380
4415 if path and opts[b'localssh']:
4381 if path and opts[b'localssh']:
4416 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4382 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4417
4383
4418 if ui.interactive():
4384 if ui.interactive():
4419 ui.write(_(b'(waiting for commands on stdin)\n'))
4385 ui.write(_(b'(waiting for commands on stdin)\n'))
4420
4386
4421 blocks = list(_parsewirelangblocks(ui.fin))
4387 blocks = list(_parsewirelangblocks(ui.fin))
4422
4388
4423 proc = None
4389 proc = None
4424 stdin = None
4390 stdin = None
4425 stdout = None
4391 stdout = None
4426 stderr = None
4392 stderr = None
4427 opener = None
4393 opener = None
4428
4394
4429 if opts[b'localssh']:
4395 if opts[b'localssh']:
4430 # We start the SSH server in its own process so there is process
4396 # We start the SSH server in its own process so there is process
4431 # separation. This prevents a whole class of potential bugs around
4397 # separation. This prevents a whole class of potential bugs around
4432 # shared state from interfering with server operation.
4398 # shared state from interfering with server operation.
4433 args = procutil.hgcmd() + [
4399 args = procutil.hgcmd() + [
4434 b'-R',
4400 b'-R',
4435 repo.root,
4401 repo.root,
4436 b'debugserve',
4402 b'debugserve',
4437 b'--sshstdio',
4403 b'--sshstdio',
4438 ]
4404 ]
4439 proc = subprocess.Popen(
4405 proc = subprocess.Popen(
4440 pycompat.rapply(procutil.tonativestr, args),
4406 pycompat.rapply(procutil.tonativestr, args),
4441 stdin=subprocess.PIPE,
4407 stdin=subprocess.PIPE,
4442 stdout=subprocess.PIPE,
4408 stdout=subprocess.PIPE,
4443 stderr=subprocess.PIPE,
4409 stderr=subprocess.PIPE,
4444 bufsize=0,
4410 bufsize=0,
4445 )
4411 )
4446
4412
4447 stdin = proc.stdin
4413 stdin = proc.stdin
4448 stdout = proc.stdout
4414 stdout = proc.stdout
4449 stderr = proc.stderr
4415 stderr = proc.stderr
4450
4416
4451 # We turn the pipes into observers so we can log I/O.
4417 # We turn the pipes into observers so we can log I/O.
4452 if ui.verbose or opts[b'peer'] == b'raw':
4418 if ui.verbose or opts[b'peer'] == b'raw':
4453 stdin = util.makeloggingfileobject(
4419 stdin = util.makeloggingfileobject(
4454 ui, proc.stdin, b'i', logdata=True
4420 ui, proc.stdin, b'i', logdata=True
4455 )
4421 )
4456 stdout = util.makeloggingfileobject(
4422 stdout = util.makeloggingfileobject(
4457 ui, proc.stdout, b'o', logdata=True
4423 ui, proc.stdout, b'o', logdata=True
4458 )
4424 )
4459 stderr = util.makeloggingfileobject(
4425 stderr = util.makeloggingfileobject(
4460 ui, proc.stderr, b'e', logdata=True
4426 ui, proc.stderr, b'e', logdata=True
4461 )
4427 )
4462
4428
4463 # --localssh also implies the peer connection settings.
4429 # --localssh also implies the peer connection settings.
4464
4430
4465 url = b'ssh://localserver'
4431 url = b'ssh://localserver'
4466 autoreadstderr = not opts[b'noreadstderr']
4432 autoreadstderr = not opts[b'noreadstderr']
4467
4433
4468 if opts[b'peer'] == b'ssh1':
4434 if opts[b'peer'] == b'ssh1':
4469 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4435 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4470 peer = sshpeer.sshv1peer(
4436 peer = sshpeer.sshv1peer(
4471 ui,
4437 ui,
4472 url,
4438 url,
4473 proc,
4439 proc,
4474 stdin,
4440 stdin,
4475 stdout,
4441 stdout,
4476 stderr,
4442 stderr,
4477 None,
4443 None,
4478 autoreadstderr=autoreadstderr,
4444 autoreadstderr=autoreadstderr,
4479 )
4445 )
4480 elif opts[b'peer'] == b'raw':
4446 elif opts[b'peer'] == b'raw':
4481 ui.write(_(b'using raw connection to peer\n'))
4447 ui.write(_(b'using raw connection to peer\n'))
4482 peer = None
4448 peer = None
4483 else:
4449 else:
4484 ui.write(_(b'creating ssh peer from handshake results\n'))
4450 ui.write(_(b'creating ssh peer from handshake results\n'))
4485 peer = sshpeer.makepeer(
4451 peer = sshpeer.makepeer(
4486 ui,
4452 ui,
4487 url,
4453 url,
4488 proc,
4454 proc,
4489 stdin,
4455 stdin,
4490 stdout,
4456 stdout,
4491 stderr,
4457 stderr,
4492 autoreadstderr=autoreadstderr,
4458 autoreadstderr=autoreadstderr,
4493 )
4459 )
4494
4460
4495 elif path:
4461 elif path:
4496 # We bypass hg.peer() so we can proxy the sockets.
4462 # We bypass hg.peer() so we can proxy the sockets.
4497 # TODO consider not doing this because we skip
4463 # TODO consider not doing this because we skip
4498 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4464 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4499 u = urlutil.url(path)
4465 u = urlutil.url(path)
4500 if u.scheme != b'http':
4466 if u.scheme != b'http':
4501 raise error.Abort(_(b'only http:// paths are currently supported'))
4467 raise error.Abort(_(b'only http:// paths are currently supported'))
4502
4468
4503 url, authinfo = u.authinfo()
4469 url, authinfo = u.authinfo()
4504 openerargs = {
4470 openerargs = {
4505 'useragent': b'Mercurial debugwireproto',
4471 'useragent': b'Mercurial debugwireproto',
4506 }
4472 }
4507
4473
4508 # Turn pipes/sockets into observers so we can log I/O.
4474 # Turn pipes/sockets into observers so we can log I/O.
4509 if ui.verbose:
4475 if ui.verbose:
4510 openerargs.update(
4476 openerargs.update(
4511 {
4477 {
4512 'loggingfh': ui,
4478 'loggingfh': ui,
4513 'loggingname': b's',
4479 'loggingname': b's',
4514 'loggingopts': {
4480 'loggingopts': {
4515 'logdata': True,
4481 'logdata': True,
4516 'logdataapis': False,
4482 'logdataapis': False,
4517 },
4483 },
4518 }
4484 }
4519 )
4485 )
4520
4486
4521 if ui.debugflag:
4487 if ui.debugflag:
4522 openerargs['loggingopts']['logdataapis'] = True
4488 openerargs['loggingopts']['logdataapis'] = True
4523
4489
4524 # Don't send default headers when in raw mode. This allows us to
4490 # Don't send default headers when in raw mode. This allows us to
4525 # bypass most of the behavior of our URL handling code so we can
4491 # bypass most of the behavior of our URL handling code so we can
4526 # have near complete control over what's sent on the wire.
4492 # have near complete control over what's sent on the wire.
4527 if opts[b'peer'] == b'raw':
4493 if opts[b'peer'] == b'raw':
4528 openerargs['sendaccept'] = False
4494 openerargs['sendaccept'] = False
4529
4495
4530 opener = urlmod.opener(ui, authinfo, **openerargs)
4496 opener = urlmod.opener(ui, authinfo, **openerargs)
4531
4497
4532 if opts[b'peer'] == b'raw':
4498 if opts[b'peer'] == b'raw':
4533 ui.write(_(b'using raw connection to peer\n'))
4499 ui.write(_(b'using raw connection to peer\n'))
4534 peer = None
4500 peer = None
4535 elif opts[b'peer']:
4501 elif opts[b'peer']:
4536 raise error.Abort(
4502 raise error.Abort(
4537 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4503 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4538 )
4504 )
4539 else:
4505 else:
4540 peer = httppeer.makepeer(ui, path, opener=opener)
4506 peer = httppeer.makepeer(ui, path, opener=opener)
4541
4507
4542 # We /could/ populate stdin/stdout with sock.makefile()...
4508 # We /could/ populate stdin/stdout with sock.makefile()...
4543 else:
4509 else:
4544 raise error.Abort(_(b'unsupported connection configuration'))
4510 raise error.Abort(_(b'unsupported connection configuration'))
4545
4511
4546 batchedcommands = None
4512 batchedcommands = None
4547
4513
4548 # Now perform actions based on the parsed wire language instructions.
4514 # Now perform actions based on the parsed wire language instructions.
4549 for action, lines in blocks:
4515 for action, lines in blocks:
4550 if action in (b'raw', b'raw+'):
4516 if action in (b'raw', b'raw+'):
4551 if not stdin:
4517 if not stdin:
4552 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4518 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4553
4519
4554 # Concatenate the data together.
4520 # Concatenate the data together.
4555 data = b''.join(l.lstrip() for l in lines)
4521 data = b''.join(l.lstrip() for l in lines)
4556 data = stringutil.unescapestr(data)
4522 data = stringutil.unescapestr(data)
4557 stdin.write(data)
4523 stdin.write(data)
4558
4524
4559 if action == b'raw+':
4525 if action == b'raw+':
4560 stdin.flush()
4526 stdin.flush()
4561 elif action == b'flush':
4527 elif action == b'flush':
4562 if not stdin:
4528 if not stdin:
4563 raise error.Abort(_(b'cannot call flush on this peer'))
4529 raise error.Abort(_(b'cannot call flush on this peer'))
4564 stdin.flush()
4530 stdin.flush()
4565 elif action.startswith(b'command'):
4531 elif action.startswith(b'command'):
4566 if not peer:
4532 if not peer:
4567 raise error.Abort(
4533 raise error.Abort(
4568 _(
4534 _(
4569 b'cannot send commands unless peer instance '
4535 b'cannot send commands unless peer instance '
4570 b'is available'
4536 b'is available'
4571 )
4537 )
4572 )
4538 )
4573
4539
4574 command = action.split(b' ', 1)[1]
4540 command = action.split(b' ', 1)[1]
4575
4541
4576 args = {}
4542 args = {}
4577 for line in lines:
4543 for line in lines:
4578 # We need to allow empty values.
4544 # We need to allow empty values.
4579 fields = line.lstrip().split(b' ', 1)
4545 fields = line.lstrip().split(b' ', 1)
4580 if len(fields) == 1:
4546 if len(fields) == 1:
4581 key = fields[0]
4547 key = fields[0]
4582 value = b''
4548 value = b''
4583 else:
4549 else:
4584 key, value = fields
4550 key, value = fields
4585
4551
4586 if value.startswith(b'eval:'):
4552 if value.startswith(b'eval:'):
4587 value = stringutil.evalpythonliteral(value[5:])
4553 value = stringutil.evalpythonliteral(value[5:])
4588 else:
4554 else:
4589 value = stringutil.unescapestr(value)
4555 value = stringutil.unescapestr(value)
4590
4556
4591 args[key] = value
4557 args[key] = value
4592
4558
4593 if batchedcommands is not None:
4559 if batchedcommands is not None:
4594 batchedcommands.append((command, args))
4560 batchedcommands.append((command, args))
4595 continue
4561 continue
4596
4562
4597 ui.status(_(b'sending %s command\n') % command)
4563 ui.status(_(b'sending %s command\n') % command)
4598
4564
4599 if b'PUSHFILE' in args:
4565 if b'PUSHFILE' in args:
4600 with open(args[b'PUSHFILE'], 'rb') as fh:
4566 with open(args[b'PUSHFILE'], 'rb') as fh:
4601 del args[b'PUSHFILE']
4567 del args[b'PUSHFILE']
4602 res, output = peer._callpush(
4568 res, output = peer._callpush(
4603 command, fh, **pycompat.strkwargs(args)
4569 command, fh, **pycompat.strkwargs(args)
4604 )
4570 )
4605 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4571 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4606 ui.status(
4572 ui.status(
4607 _(b'remote output: %s\n') % stringutil.escapestr(output)
4573 _(b'remote output: %s\n') % stringutil.escapestr(output)
4608 )
4574 )
4609 else:
4575 else:
4610 with peer.commandexecutor() as e:
4576 with peer.commandexecutor() as e:
4611 res = e.callcommand(command, args).result()
4577 res = e.callcommand(command, args).result()
4612
4578
4613 ui.status(
4579 ui.status(
4614 _(b'response: %s\n')
4580 _(b'response: %s\n')
4615 % stringutil.pprint(res, bprefix=True, indent=2)
4581 % stringutil.pprint(res, bprefix=True, indent=2)
4616 )
4582 )
4617
4583
4618 elif action == b'batchbegin':
4584 elif action == b'batchbegin':
4619 if batchedcommands is not None:
4585 if batchedcommands is not None:
4620 raise error.Abort(_(b'nested batchbegin not allowed'))
4586 raise error.Abort(_(b'nested batchbegin not allowed'))
4621
4587
4622 batchedcommands = []
4588 batchedcommands = []
4623 elif action == b'batchsubmit':
4589 elif action == b'batchsubmit':
4624 # There is a batching API we could go through. But it would be
4590 # There is a batching API we could go through. But it would be
4625 # difficult to normalize requests into function calls. It is easier
4591 # difficult to normalize requests into function calls. It is easier
4626 # to bypass this layer and normalize to commands + args.
4592 # to bypass this layer and normalize to commands + args.
4627 ui.status(
4593 ui.status(
4628 _(b'sending batch with %d sub-commands\n')
4594 _(b'sending batch with %d sub-commands\n')
4629 % len(batchedcommands)
4595 % len(batchedcommands)
4630 )
4596 )
4631 assert peer is not None
4597 assert peer is not None
4632 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4598 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4633 ui.status(
4599 ui.status(
4634 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4600 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4635 )
4601 )
4636
4602
4637 batchedcommands = None
4603 batchedcommands = None
4638
4604
4639 elif action.startswith(b'httprequest '):
4605 elif action.startswith(b'httprequest '):
4640 if not opener:
4606 if not opener:
4641 raise error.Abort(
4607 raise error.Abort(
4642 _(b'cannot use httprequest without an HTTP peer')
4608 _(b'cannot use httprequest without an HTTP peer')
4643 )
4609 )
4644
4610
4645 request = action.split(b' ', 2)
4611 request = action.split(b' ', 2)
4646 if len(request) != 3:
4612 if len(request) != 3:
4647 raise error.Abort(
4613 raise error.Abort(
4648 _(
4614 _(
4649 b'invalid httprequest: expected format is '
4615 b'invalid httprequest: expected format is '
4650 b'"httprequest <method> <path>'
4616 b'"httprequest <method> <path>'
4651 )
4617 )
4652 )
4618 )
4653
4619
4654 method, httppath = request[1:]
4620 method, httppath = request[1:]
4655 headers = {}
4621 headers = {}
4656 body = None
4622 body = None
4657 frames = []
4623 frames = []
4658 for line in lines:
4624 for line in lines:
4659 line = line.lstrip()
4625 line = line.lstrip()
4660 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4626 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4661 if m:
4627 if m:
4662 # Headers need to use native strings.
4628 # Headers need to use native strings.
4663 key = pycompat.strurl(m.group(1))
4629 key = pycompat.strurl(m.group(1))
4664 value = pycompat.strurl(m.group(2))
4630 value = pycompat.strurl(m.group(2))
4665 headers[key] = value
4631 headers[key] = value
4666 continue
4632 continue
4667
4633
4668 if line.startswith(b'BODYFILE '):
4634 if line.startswith(b'BODYFILE '):
4669 with open(line.split(b' ', 1), b'rb') as fh:
4635 with open(line.split(b' ', 1), b'rb') as fh:
4670 body = fh.read()
4636 body = fh.read()
4671 elif line.startswith(b'frame '):
4637 elif line.startswith(b'frame '):
4672 frame = wireprotoframing.makeframefromhumanstring(
4638 frame = wireprotoframing.makeframefromhumanstring(
4673 line[len(b'frame ') :]
4639 line[len(b'frame ') :]
4674 )
4640 )
4675
4641
4676 frames.append(frame)
4642 frames.append(frame)
4677 else:
4643 else:
4678 raise error.Abort(
4644 raise error.Abort(
4679 _(b'unknown argument to httprequest: %s') % line
4645 _(b'unknown argument to httprequest: %s') % line
4680 )
4646 )
4681
4647
4682 url = path + httppath
4648 url = path + httppath
4683
4649
4684 if frames:
4650 if frames:
4685 body = b''.join(bytes(f) for f in frames)
4651 body = b''.join(bytes(f) for f in frames)
4686
4652
4687 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4653 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4688
4654
4689 # urllib.Request insists on using has_data() as a proxy for
4655 # urllib.Request insists on using has_data() as a proxy for
4690 # determining the request method. Override that to use our
4656 # determining the request method. Override that to use our
4691 # explicitly requested method.
4657 # explicitly requested method.
4692 req.get_method = lambda: pycompat.sysstr(method)
4658 req.get_method = lambda: pycompat.sysstr(method)
4693
4659
4694 try:
4660 try:
4695 res = opener.open(req)
4661 res = opener.open(req)
4696 body = res.read()
4662 body = res.read()
4697 except util.urlerr.urlerror as e:
4663 except util.urlerr.urlerror as e:
4698 # read() method must be called, but only exists in Python 2
4664 # read() method must be called, but only exists in Python 2
4699 getattr(e, 'read', lambda: None)()
4665 getattr(e, 'read', lambda: None)()
4700 continue
4666 continue
4701
4667
4702 ct = res.headers.get('Content-Type')
4668 ct = res.headers.get('Content-Type')
4703 if ct == 'application/mercurial-cbor':
4669 if ct == 'application/mercurial-cbor':
4704 ui.write(
4670 ui.write(
4705 _(b'cbor> %s\n')
4671 _(b'cbor> %s\n')
4706 % stringutil.pprint(
4672 % stringutil.pprint(
4707 cborutil.decodeall(body), bprefix=True, indent=2
4673 cborutil.decodeall(body), bprefix=True, indent=2
4708 )
4674 )
4709 )
4675 )
4710
4676
4711 elif action == b'close':
4677 elif action == b'close':
4712 assert peer is not None
4678 assert peer is not None
4713 peer.close()
4679 peer.close()
4714 elif action == b'readavailable':
4680 elif action == b'readavailable':
4715 if not stdout or not stderr:
4681 if not stdout or not stderr:
4716 raise error.Abort(
4682 raise error.Abort(
4717 _(b'readavailable not available on this peer')
4683 _(b'readavailable not available on this peer')
4718 )
4684 )
4719
4685
4720 stdin.close()
4686 stdin.close()
4721 stdout.read()
4687 stdout.read()
4722 stderr.read()
4688 stderr.read()
4723
4689
4724 elif action == b'readline':
4690 elif action == b'readline':
4725 if not stdout:
4691 if not stdout:
4726 raise error.Abort(_(b'readline not available on this peer'))
4692 raise error.Abort(_(b'readline not available on this peer'))
4727 stdout.readline()
4693 stdout.readline()
4728 elif action == b'ereadline':
4694 elif action == b'ereadline':
4729 if not stderr:
4695 if not stderr:
4730 raise error.Abort(_(b'ereadline not available on this peer'))
4696 raise error.Abort(_(b'ereadline not available on this peer'))
4731 stderr.readline()
4697 stderr.readline()
4732 elif action.startswith(b'read '):
4698 elif action.startswith(b'read '):
4733 count = int(action.split(b' ', 1)[1])
4699 count = int(action.split(b' ', 1)[1])
4734 if not stdout:
4700 if not stdout:
4735 raise error.Abort(_(b'read not available on this peer'))
4701 raise error.Abort(_(b'read not available on this peer'))
4736 stdout.read(count)
4702 stdout.read(count)
4737 elif action.startswith(b'eread '):
4703 elif action.startswith(b'eread '):
4738 count = int(action.split(b' ', 1)[1])
4704 count = int(action.split(b' ', 1)[1])
4739 if not stderr:
4705 if not stderr:
4740 raise error.Abort(_(b'eread not available on this peer'))
4706 raise error.Abort(_(b'eread not available on this peer'))
4741 stderr.read(count)
4707 stderr.read(count)
4742 else:
4708 else:
4743 raise error.Abort(_(b'unknown action: %s') % action)
4709 raise error.Abort(_(b'unknown action: %s') % action)
4744
4710
4745 if batchedcommands is not None:
4711 if batchedcommands is not None:
4746 raise error.Abort(_(b'unclosed "batchbegin" request'))
4712 raise error.Abort(_(b'unclosed "batchbegin" request'))
4747
4713
4748 if peer:
4714 if peer:
4749 peer.close()
4715 peer.close()
4750
4716
4751 if proc:
4717 if proc:
4752 proc.kill()
4718 proc.kill()
@@ -1,621 +1,663
1 # revlogutils/debug.py - utility used for revlog debuging
1 # revlogutils/debug.py - utility used for revlog debuging
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2022 Octobus <contact@octobus.net>
4 # Copyright 2022 Octobus <contact@octobus.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import collections
9 import collections
10 import string
10 import string
11
11
12 from .. import (
12 from .. import (
13 mdiff,
13 node as nodemod,
14 node as nodemod,
15 revlogutils,
14 util,
16 util,
15 )
17 )
16
18
17 from . import (
19 from . import (
18 constants,
20 constants,
21 deltas as deltautil,
19 )
22 )
20
23
# Registry of all known index columns, in display order; populated by the
# ``debug_column`` decorator below.
INDEX_ENTRY_DEBUG_COLUMN = []

# Sentinel width: the column is as wide as the repository's node (hash) size,
# which is only known at display time.
NODE_SIZE = object()
24
27
25
28
class _column_base:
    """contains the definition of a revlog column

    name: the column header,
    value_func: the function called to get a value,
    size: the width of the column,
    verbose_only: only include the column in verbose mode.
    """

    def __init__(self, name, value_func, size=None, verbose=False):
        self.name = name
        self.value_func = value_func
        # NODE_SIZE means "as wide as a node hash"; resolved in get_size().
        if size is not NODE_SIZE:
            if size is None:
                size = 8  # arbitrary default
            # never narrower than the header text itself
            size = max(len(name), size)
        self._size = size
        self.verbose_only = verbose

    def get_size(self, node_size):
        """return the column width, resolving the NODE_SIZE sentinel

        node_size: the width to use for node-sized columns.
        """
        if self._size is NODE_SIZE:
            return node_size
        else:
            return self._size
50
53
51
54
def debug_column(name, size=None, verbose=False):
    """decorated function is registered as a column

    name: the name of the column,
    size: the expected size of the column.

    The decorated function is wrapped in a ``_column_base`` entry that is
    appended to ``INDEX_ENTRY_DEBUG_COLUMN``; the entry (not the raw
    function) is returned so the module attribute is the column object.
    """

    def register(func):
        entry = _column_base(
            name=name,
            value_func=func,
            size=size,
            verbose=verbose,
        )
        INDEX_ENTRY_DEBUG_COLUMN.append(entry)
        return entry

    return register
70
73
71
74
@debug_column(b"rev", size=6)
def _rev(index, rev, entry, hexfn):
    """column: the revision number itself"""
    return b"%d" % rev
75
78
76
79
@debug_column(b"rank", size=6, verbose=True)
def rank(index, rev, entry, hexfn):
    """column: the rank stored in the index entry (verbose only)"""
    return b"%d" % entry[constants.ENTRY_RANK]
80
83
81
84
@debug_column(b"linkrev", size=6)
def _linkrev(index, rev, entry, hexfn):
    """column: the linked changelog revision"""
    return b"%d" % entry[constants.ENTRY_LINK_REV]
85
88
86
89
@debug_column(b"nodeid", size=NODE_SIZE)
def _nodeid(index, rev, entry, hexfn):
    """column: the node id of this revision, rendered through hexfn"""
    return hexfn(entry[constants.ENTRY_NODE_ID])
90
93
91
94
@debug_column(b"p1-rev", size=6, verbose=True)
def _p1_rev(index, rev, entry, hexfn):
    """column: revision number of the first parent (verbose only)"""
    return b"%d" % entry[constants.ENTRY_PARENT_1]
95
98
96
99
@debug_column(b"p1-nodeid", size=NODE_SIZE)
def _p1_node(index, rev, entry, hexfn):
    """column: node id of the first parent

    The parent's node is looked up through its own index entry.
    """
    parent = entry[constants.ENTRY_PARENT_1]
    p_entry = index[parent]
    return hexfn(p_entry[constants.ENTRY_NODE_ID])
102
105
103
106
@debug_column(b"p2-rev", size=6, verbose=True)
def _p2_rev(index, rev, entry, hexfn):
    """column: revision number of the second parent (verbose only)"""
    return b"%d" % entry[constants.ENTRY_PARENT_2]
107
110
108
111
@debug_column(b"p2-nodeid", size=NODE_SIZE)
def _p2_node(index, rev, entry, hexfn):
    """column: node id of the second parent

    The parent's node is looked up through its own index entry.
    """
    parent = entry[constants.ENTRY_PARENT_2]
    p_entry = index[parent]
    return hexfn(p_entry[constants.ENTRY_NODE_ID])
114
117
115
118
@debug_column(b"full-size", size=20, verbose=True)
def full_size(index, rev, entry, hexfn):
    """column: uncompressed length of the revision data (verbose only)"""
    return b"%d" % entry[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
119
122
120
123
@debug_column(b"delta-base", size=6, verbose=True)
def delta_base(index, rev, entry, hexfn):
    """column: revision this entry's delta is based on (verbose only)"""
    return b"%d" % entry[constants.ENTRY_DELTA_BASE]
124
127
125
128
@debug_column(b"flags", size=2, verbose=True)
def flags(index, rev, entry, hexfn):
    """column: revision flags (verbose only)

    The index packs flags into the low 16 bits of the offset field.
    """
    field = entry[constants.ENTRY_DATA_OFFSET]
    field &= 0xFFFF
    return b"%d" % field
131
134
132
135
@debug_column(b"comp-mode", size=4, verbose=True)
def compression_mode(index, rev, entry, hexfn):
    """column: compression mode of the revision data (verbose only)"""
    return b"%d" % entry[constants.ENTRY_DATA_COMPRESSION_MODE]
136
139
137
140
@debug_column(b"data-offset", size=20, verbose=True)
def data_offset(index, rev, entry, hexfn):
    """column: offset of the revision data in the data file (verbose only)

    The actual offset lives in the high bits of the field; the low 16 bits
    hold the flags (see the ``flags`` column), hence the shift.
    """
    field = entry[constants.ENTRY_DATA_OFFSET]
    field >>= 16
    return b"%d" % field
143
146
144
147
@debug_column(b"chunk-size", size=10, verbose=True)
def data_chunk_size(index, rev, entry, hexfn):
    """column: stored (compressed) length of the revision data (verbose only)"""
    return b"%d" % entry[constants.ENTRY_DATA_COMPRESSED_LENGTH]
148
151
149
152
@debug_column(b"sd-comp-mode", size=7, verbose=True)
def sidedata_compression_mode(index, rev, entry, hexfn):
    """Symbolic name of the sidedata compression mode, or its raw number."""
    mode = entry[constants.ENTRY_SIDEDATA_COMPRESSION_MODE]
    names = {
        constants.COMP_MODE_PLAIN: b"plain",
        constants.COMP_MODE_DEFAULT: b"default",
        constants.COMP_MODE_INLINE: b"inline",
    }
    if mode in names:
        return names[mode]
    # Unknown mode: fall back to the raw numeric value.
    return b"%d" % mode
161
164
162
165
@debug_column(b"sidedata-offset", size=20, verbose=True)
def sidedata_offset(index, rev, entry, hexfn):
    """Byte offset of the revision's sidedata blob."""
    offset = entry[constants.ENTRY_SIDEDATA_OFFSET]
    return b"%d" % offset
166
169
167
170
@debug_column(b"sd-chunk-size", size=10, verbose=True)
def sidedata_chunk_size(index, rev, entry, hexfn):
    """On-disk (compressed) length of the sidedata chunk."""
    length = entry[constants.ENTRY_SIDEDATA_COMPRESSED_LENGTH]
    return b"%d" % length
171
174
172
175
def debug_index(
    ui,
    repo,
    formatter,
    revlog,
    full_node,
):
    """display index data for a revlog"""
    # Render nodes either in full or shortened form.
    hexfn = nodemod.hex if full_node else nodemod.short

    # Width of the node column: measure one rendered node if the revlog
    # has any revision, otherwise keep the 12-character default.
    idlen = 12
    for sample in revlog:
        idlen = len(hexfn(revlog.node(sample)))
        break

    fm = formatter

    def active_columns():
        # Columns flagged verbose-only are skipped unless --verbose is set.
        for col in INDEX_ENTRY_DEBUG_COLUMN:
            if col.verbose_only and not ui.verbose:
                continue
            yield col

    # Header line: each column name right-justified to its display width.
    header = [c.name.rjust(c.get_size(idlen)) for c in active_columns()]
    fm.plain(b' '.join(header) + b'\n')

    index = revlog.index

    for rev in revlog:
        fm.startitem()
        entry = index[rev]
        separator = b''
        for col in active_columns():
            if separator:
                fm.plain(separator)
            separator = b' '
            width = col.get_size(idlen)
            value = col.value_func(index, rev, entry, hexfn)
            fm.write(col.name, b"%%%ds" % width, value)
        fm.plain(b'\n')

    fm.end()
223
226
224
227
def dump(ui, revlog):
    """perform the work for `hg debugrevlog --dump`"""
    # XXX seems redundant with debug index ?
    r = revlog
    numrevs = len(r)
    ui.write(
        (
            b"# rev p1rev p2rev start end deltastart base p1 p2"
            b" rawsize totalsize compression heads chainlen\n"
        )
    )
    ts = 0  # running total of raw (uncompressed) sizes
    heads = set()

    for rev in range(numrevs):
        dbase = r.deltaparent(rev)
        if dbase == -1:
            # No delta parent: the revision is its own delta base.
            dbase = rev
        cbase = r.chainbase(rev)
        clen = r.chainlen(rev)
        p1, p2 = r.parentrevs(rev)
        rs = r.rawsize(rev)
        ts += rs
        # A head is a revision with no child seen so far: retire the
        # current revision's parents, then register it as a head.
        heads.discard(p1)
        heads.discard(p2)
        heads.add(rev)
        try:
            # Cumulative compression ratio up to this revision.
            compression = ts / r.end(rev)
        except ZeroDivisionError:
            compression = 0
        fields = (
            rev,
            p1,
            p2,
            r.start(rev),
            r.end(rev),
            r.start(dbase),
            r.start(cbase),
            r.start(p1),
            r.start(p2),
            rs,
            ts,
            compression,
            len(heads),
            clen,
        )
        ui.write(
            b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
            b"%11d %5d %8d\n" % fields
        )
274
277
275
278
def debug_revlog(ui, revlog):
    """code for `hg debugrevlog`

    Walk every revision of *revlog* once, accumulating statistics about
    delta types, snapshot depths, chain lengths and chunk compression,
    then print a human-readable report on *ui*.
    """
    r = revlog
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & constants.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & constants.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### the total size of stored content if incompressed.
    full_text_total_size = 0
    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # number of snapshots with a non-ancestor delta
    numsnapdepth_nad = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against prev, where prev is a non-ancestor
    numprev_nad = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against other that is a non-ancestor
    numother_nad = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision: [min, max, total] (min starts at None and
    # is normalized to 0 after the loop when no sample was seen)
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into the [min, max, total] accumulator `l`.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in range(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            s = r.rawsize(rev)
            full_text_total_size += s
            addsize(s, datasize)
        if p2 != nodemod.nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nodemod.nullrev:
            # Stored as a full snapshot (or an empty text): starts a chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # nad: the delta base is neither a parent nor any ancestor.
            nad = (
                delta != p1 and delta != p2 and not r.isancestorrev(delta, rev)
            )
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                if nad:
                    numsnapdepth_nad[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                    elif nad:
                        numprev_nad += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nodemod.nullrev:
                    numother += 1
                    # BUG FIX: this increment used to be unconditional,
                    # which made num_other_ancestors (numother -
                    # numother_nad) always zero; only count bases that are
                    # genuinely not ancestors.
                    if nad:
                        numother_nad += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev - numprev_nad
    num_other_ancestors = numother - numother_nad
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
        for depth in snapsizedepth:
            snaptotal[depth] = snapsizedepth[depth][2]
            snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Integer format sized to the widest expected value.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # "<count> (<percent>)" format sized to the widest expected value.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total); 100% when total is zero.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        base = b' lvl-%-3d : ' % depth
        count = fmt % pcfmt(numsnapdepth[depth], numrevs)
        pieces = [base, count]
        if numsnapdepth_nad[depth]:
            pieces[-1] = count = count[:-1]  # drop the final '\n'
            more = b' non-ancestor-bases: '
            anc_count = fmt
            anc_count %= pcfmt(numsnapdepth_nad[depth], numsnapdepth[depth])
            pieces.append(more)
            pieces.append(anc_count)
        ui.write(b''.join(pieces))
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    letters = string.ascii_letters.encode('ascii')

    def fmtchunktype(chunktype):
        # Label for a chunk-type bucket: the compression marker byte,
        # shown as hex (plus the character itself when printable ASCII).
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in letters:
            return b' 0x%s (%s) : ' % (nodemod.hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % nodemod.hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    # Group the total size in thousands ("1 234 567") for readability.
    b_total = b"%d" % full_text_total_size
    p_total = []
    while len(b_total) > 3:
        p_total.append(b_total[-3:])
        b_total = b_total[:-3]
    p_total.append(b_total)
    p_total.reverse()
    b_total = b' '.join(p_total)

    ui.write(b'\n')
    ui.writenoi18n(b'total-stored-content: %s bytes\n' % b_total)
    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other-ancestor : ' + fmt2 % pcfmt(numoprev, numprev)
            )
            # BUG FIX: this line used to print pcfmt(numoprev, numprev),
            # duplicating the "other-ancestor" figure; the count of
            # prev-deltas whose base is unrelated is numprev_nad.
            ui.writenoi18n(
                b' unrelated : ' + fmt2 % pcfmt(numprev_nad, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against ancs : '
                + fmt % pcfmt(num_other_ancestors, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : '
                + fmt % pcfmt(numother_nad, numdeltas)
            )
625
626
def debug_delta_find(ui, revlog, rev, base_rev=nodemod.nullrev):
    """display the search process for a delta"""
    # Route the delta computer's debug output straight to the UI.
    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=not ui.quiet,
    )

    full_text = revlog.revision(rev)
    textlen = len(full_text)
    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)

    if base_rev == nodemod.nullrev:
        # No explicit base: hand the full text to the delta computer.
        btext = [full_text]
        cachedelta = None
    else:
        # Seed the search with a delta computed against the requested base.
        delta = mdiff.textdiff(revlog.revision(base_rev), full_text)
        cachedelta = (base_rev, delta)
        btext = [None]

    revinfo = revlogutils.revisioninfo(
        node,
        revlog.node(p1r),
        revlog.node(p2r),
        btext,
        textlen,
        cachedelta,
        revlog.flags(rev),
    )

    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
General Comments 0
You need to be logged in to leave comments. Login now